diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 45a33bb9..b68077ee 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: Buffrs CLI +name: Buffrs CI on: push: @@ -16,6 +16,7 @@ jobs: steps: - uses: actions/checkout@v3 - run: rustup update && rustup component add rustfmt + - run: cargo install buffrs && cd registry && buffrs install - run: cargo fmt --check --all clippy: @@ -23,29 +24,12 @@ jobs: steps: - uses: actions/checkout@v3 - run: rustup update && rustup component add clippy + - run: cargo install buffrs && cd registry && buffrs install - name: Install Protoc uses: arduino/setup-protoc@v2 - uses: Swatinem/rust-cache@v2 - run: cargo clippy --all-targets --workspace -- -D warnings -D clippy::all - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - lfs: "true" - - run: rustup update - - uses: Swatinem/rust-cache@v2 - - name: Install Protoc - uses: arduino/setup-protoc@v2 - - name: Setup registry - uses: isbang/compose-action@v1.5.1 - with: - compose-file: "./registry/docker-compose.yml" - - run: cargo test --workspace - env: - RUST_BACKTRACE: 1 - deny: runs-on: ubuntu-latest steps: @@ -55,7 +39,7 @@ jobs: - run: cargo install cargo-deny || true - run: cargo deny --workspace check - coverage: + test: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -64,14 +48,14 @@ jobs: - run: rustup update - run: rustup component add llvm-tools-preview - uses: Swatinem/rust-cache@v2 + - run: cargo install buffrs && cd registry && buffrs install - run: cargo install cargo-llvm-cov || true - name: Install Protoc uses: arduino/setup-protoc@v2 - - name: Setup registry - uses: isbang/compose-action@v1.5.1 - with: - compose-file: "./registry/docker-compose.yml" + - run: cargo install buffrs && cd registry && buffrs install - run: cargo llvm-cov --workspace --fail-under-lines "$MINIMUM_LINE_COVERAGE_PERCENT" + env: + RUST_BACKTRACE: 1 typos: runs-on: ubuntu-latest diff --git 
a/.github/workflows/nix_ci_ubuntu.yml b/.github/workflows/nix.yml similarity index 75% rename from .github/workflows/nix_ci_ubuntu.yml rename to .github/workflows/nix.yml index bb7ba4b9..47a68624 100644 --- a/.github/workflows/nix_ci_ubuntu.yml +++ b/.github/workflows/nix.yml @@ -1,4 +1,4 @@ -name: Nix on Ubuntu Buffers CI +name: Nix on: push: @@ -8,8 +8,11 @@ on: workflow_dispatch: jobs: - check_flake_w_nix_on_ubuntu: - runs-on: ubuntu-latest + check: + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - uses: DeterminateSystems/nix-installer-action@main diff --git a/.github/workflows/nix_ci_mac.yml b/.github/workflows/nix_ci_mac.yml deleted file mode 100644 index 2eeb88e1..00000000 --- a/.github/workflows/nix_ci_mac.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Nix on Mac Buffers CI - -on: - push: - branches: - - main - workflow_dispatch: - -jobs: - check_flake_w_nix_on_mac: - runs-on: macos-latest - steps: - - uses: actions/checkout@v3 - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - name: Run `nix flake check` - run: nix flake check diff --git a/Cargo.lock b/Cargo.lock index 1d5d8b97..30ecc22a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -857,7 +857,7 @@ dependencies = [ [[package]] name = "buffrs" -version = "0.7.6" +version = "0.8.0" dependencies = [ "anyhow", "assert_cmd", @@ -879,7 +879,6 @@ dependencies = [ "pretty_assertions", "protobuf", "protobuf-parse", - "protoc", "reqwest", "semver", "serde", @@ -892,7 +891,6 @@ dependencies = [ "thiserror", "tokio", "toml", - "tonic-build", "tracing", "tracing-subscriber", "url", @@ -915,6 +913,7 @@ dependencies = [ "eyre", "proptest", "prost", + "prost-types", "rand", "semver", "serde", @@ -925,6 +924,7 @@ dependencies = [ "thiserror", "tokio", "tonic", + "tonic-build", "tonic-types", "tower", "tracing", @@ -2799,9 +2799,9 @@ dependencies = [ [[package]] name = "num_threads" -version = 
"0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] @@ -3177,16 +3177,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "protoc" -version = "2.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0218039c514f9e14a5060742ecd50427f8ac4f85a6dc58f2ddb806e318c55ee" -dependencies = [ - "log", - "which", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -4371,9 +4361,9 @@ dependencies = [ [[package]] name = "tonic" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" dependencies = [ "async-stream", "async-trait", @@ -4398,9 +4388,9 @@ dependencies = [ [[package]] name = "tonic-build" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d021fc044c18582b9a2408cd0dd05b1596e3ecdb5c4df822bb0183545683889" +checksum = "be4ef6dd70a610078cb4e338a0f79d06bc759ff1b22d2120c2ff02ae264ba9c2" dependencies = [ "prettyplease", "proc-macro2", @@ -4411,9 +4401,9 @@ dependencies = [ [[package]] name = "tonic-types" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b39bd850e4bf99146b3fd244019562cafd30338db068c5795c55b448eb02411" +checksum = "f4aa089471d8d4c60ec3aef047739713a4695f0b309d4cea0073bc55201064f4" dependencies = [ "prost", "prost-types", diff --git a/Cargo.toml b/Cargo.toml index 8cba583a..fa26d276 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,17 +1,17 @@ [package] name = "buffrs" -version = "0.7.6" +version = "0.8.0" edition = "2021" description = "Modern protobuf package 
management" authors = [ - "Mara Schulke ", "André Sá De Mello ", + "James Baker ", + "Mara Schulke ", "Patrick Elsen ", - "Tom Karwowski ", "Quentin Santos ", - "Thomas Pellissier-Tanon ", "Robert Fink ", - "James Baker ", + "Thomas Pellissier-Tanon ", + "Tom Karwowski ", ] repository = "https://github.com/helsing-ai/buffrs" documentation = "https://docs.rs/buffrs" @@ -27,7 +27,7 @@ members = ["registry"] [[bin]] name = "buffrs" path = "src/main.rs" -required-features = ["build", "git", "validation"] +required-features = ["git", "validation"] [[test]] name = "e2e" @@ -35,8 +35,7 @@ path = "tests/lib.rs" test = true [features] -default = ["build", "git", "validation"] -build = ["dep:tonic-build", "dep:protoc"] +default = ["git", "validation"] validation = ["dep:anyhow", "dep:protobuf", "dep:protobuf-parse", "dep:diff-struct"] git = [] @@ -53,16 +52,15 @@ human-panic = "1" miette = { version = "5.10.0", features = ["fancy"] } protobuf = { version = "3.3.0", optional = true } protobuf-parse = { version = "3.3.0", optional = true } -protoc = { version = "2.28.0", optional = true } reqwest = { version = "0.11", features = ["rustls-tls-native-roots"], default-features = false } semver = { version = "1", features = ["serde"] } serde = { version = "1", features = ["derive"] } +serde_json = "1" serde_typename = "0.1" tar = "0.4" thiserror = "1.0.49" tokio = { version = "^1.26", features = ["fs", "rt", "macros", "process", "io-std", "tracing"] } toml = "0.8.0" -tonic-build = { version = "0.10.0", optional = true } tracing = "0.1" tracing-subscriber = "0.3" url = { version = "2.4", features = ["serde"] } @@ -74,7 +72,7 @@ assert_cmd = "2.0" assert_fs = "1.0" axum = { version = "0.7.2", default-features = false, features = ["tokio", "http1"] } fs_extra = "1.3" -gix = { version = "0.58.0", default-features = false} +gix = { version = "0.58.0", default-features = false } hex = "0.4.3" paste = "1.0.14" predicates = "3.0" diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md 
index 580b82a9..0bfca219 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -21,8 +21,8 @@ * [Manifest vs Lockfile](guide/manifest-vs-lockfile.md) * [Buffrs Home](guide/buffrs-home.md) -* [Buffrs Integrations](integrations/index.md) - * [Cargo](integrations/cargo.md) +* [Buffrs Integrations]() + * [Cargo]() * [Poetry]() * [Npm]() diff --git a/docs/src/commands/buffrs-generate.md b/docs/src/commands/buffrs-generate.md deleted file mode 100644 index 6b4bd879..00000000 --- a/docs/src/commands/buffrs-generate.md +++ /dev/null @@ -1,14 +0,0 @@ -## buffrs generate - -Generates source code for a given language. - -### Synopsis - -`buffrs generate --lang ` - -### Description - -This command uses a protocol buffer compiler to transform the `.proto` files -under the `proto/` subtree to auto-generated source code. - -Currently the only supported value for the language is `python`. diff --git a/docs/src/commands/buffrs-list.md b/docs/src/commands/buffrs-list.md index b3678d86..db6a1046 100644 --- a/docs/src/commands/buffrs-list.md +++ b/docs/src/commands/buffrs-list.md @@ -10,9 +10,7 @@ Lists all protobuf files (`.proto`) managed by Buffrs to standard out. This command lists all protobuf files managed by Buffrs. This way the output can be fed dynamically into external code generation tools like -`protoc` to do customize the behavior of the generator beyond the capabilities -that Buffrs provides out of the box through [`buffrs -generate`](./buffrs-generate.md). +`protoc`. ### Example diff --git a/docs/src/commands/buffrs-lock-print-files.md b/docs/src/commands/buffrs-lock-print-files.md new file mode 100644 index 00000000..886028f8 --- /dev/null +++ b/docs/src/commands/buffrs-lock-print-files.md @@ -0,0 +1,40 @@ +## buffrs lock print-files + +Prints the locked files as JSON to stdout. + +### Synopsis + +`buffrs lock print-files` + +### Description + +> Note: This command is designed for consumption through other scripts and +> programs. 
+ +Using this command you can retrieve a list of files that buffrs downloads +according to the lockfile. For correct behavior please make sure your +`Proto.lock` is up to date when using this command! + +### Example + +Given a project that depends on a `physics` package at version `1.0.0` and a +populated `Proto.lock`: + +``` + +``` + +Running `buffrs lock print-files` will print the following output derived from +the lockfile: + +``` +[ + { + "url": "https://your.internal.registry/artifactory/your-repository/physics/physics-1.0.0.tgz", + "digest": "sha256:61ecdcd949c7b234160dc5aacb4546a21512de4ff8ea85f2fdd7d5fff2bf92b5" + } +] +``` + +This way you can programmatically consume this (e.g. in nix, bash, etc) and +download the files of your project while maintaining integrity. diff --git a/docs/src/commands/buffrs-package.md b/docs/src/commands/buffrs-package.md index 14ebed1f..6b3d193d 100644 --- a/docs/src/commands/buffrs-package.md +++ b/docs/src/commands/buffrs-package.md @@ -6,26 +6,18 @@ Generates a release tarball for the package in the current directory. `buffrs package` -`buffrs package --output-directory ` +### Options + +* `--dry-run`: prevents buffrs from actually writing the tarball to the filesystem +* `--output-directory`: allows you to specify a directory to output the package +* `--set-version`: allows you to override the version set in the manifest -`buffrs package --dry-run` ### Description Like the [`publish`](buffrs-publish.md) command, the `package` command bundles -the package's protocol buffer files and manifest into a gzip-compressed tarball. -However, unlike the [`publish`](buffrs-publish.md) command it does not actually -interact with the registry, instead it only writes the release tarball into the -current directory. This is useful for manual distribution and for safely -validating the package setup.
- -#### Supported package types - -Both library and API packages can be released -- the only exception is -implementation packages, which are deemed to be terminal packages in the -dependency graph. This may change in the future. More details in [Package -Types](../guide/package-types.md). - -Library packages cannot have dependencies, so releasing this kind of package may -fail if any are provided in the manifest. API dependencies on library packages -is also forbidden and will cause releases to fail to be generated. \ No newline at end of file +the package's protocol buffer files and manifest into a gzip-compressed +tarball. However, unlike the [`publish`](buffrs-publish.md) command it does not +actually interact with the registry, instead it only writes the release tarball +into the current directory. This is useful for manual distribution and for +safely validating the package setup. diff --git a/docs/src/commands/buffrs-publish.md b/docs/src/commands/buffrs-publish.md index c9485c89..bde99b55 100644 --- a/docs/src/commands/buffrs-publish.md +++ b/docs/src/commands/buffrs-publish.md @@ -12,6 +12,7 @@ Generates a release and publishes it to the specified registry. uncommitted changes. * `--dry-run`: causes a release bundle to be generated but skips uploading to the registry. +* `--set-version`: allows you to override the version set in the manifest ### Description @@ -34,4 +35,4 @@ Only Buffrs libraries and API packages can be packaged and published. More detai Library packages cannot have dependencies, so releasing this kind of package may fail if any are provided in the manifest. API dependencies on library packages -is also forbidden and will cause publication to fail. \ No newline at end of file +is also forbidden and will cause publication to fail. 
diff --git a/docs/src/commands/build-commands.md b/docs/src/commands/build-commands.md index 6e1a0d65..265fe421 100644 --- a/docs/src/commands/build-commands.md +++ b/docs/src/commands/build-commands.md @@ -5,4 +5,4 @@ documentation from Buffrs-managed protocol buffer files. ## Index -* [buffrs generate](buffrs-generate.md) \ No newline at end of file +* [buffrs list](buffrs-list.md) diff --git a/docs/src/commands/index.md b/docs/src/commands/index.md index c8b5ca9d..73d522b3 100644 --- a/docs/src/commands/index.md +++ b/docs/src/commands/index.md @@ -16,7 +16,6 @@ generally be more up-to-date. * [buffrs](buffrs.md) * [buffrs help](buffrs-help.md) * [Build Commands](build-commands.md) - * [buffrs generate](buffrs-generate.md) * [buffrs list](buffrs-list.md) * [Manifest Commands](manifest-commands.md) * [buffrs add](buffrs-add.md) diff --git a/docs/src/integrations/cargo.md b/docs/src/integrations/cargo.md deleted file mode 100644 index 72200ab3..00000000 --- a/docs/src/integrations/cargo.md +++ /dev/null @@ -1,54 +0,0 @@ -# Integrating Buffrs with Cargo - -To integrate Buffrs into your Cargo workflow, the `buffrs` crate on crates.io -is available. It contains types and functionality to interact with buffrs -programmatically (as opposed to the cli). - -To enable your project to interact with buffrs programmatically you need to add -the `buffrs` crate to your `[build-dependencies]` section: - -```toml -# .. - -[build-dependencies] -buffrs = "" -``` - -This tells Cargo to make the `buffrs` crate available within your build scripts -(contained in `build.rs`) and enables us to instruct Cargo to build the Rust -language bindings when your project is compiled via `buffrs::build` an out of -the box build script which utilizes tonic and prost. - -`build.rs`: - -```rust -fn main() { - buffrs::build().unwrap(); -} -``` - -**Note**: from buffrs 0.6.4 you should not specify `buffrs::Language::Rust` in `build()` - -Invoking `buffrs::build` will: - -1. 
Download all missing dependencies (enabling your project to just work with - `cargo run`) -2. Compile locally defined Buffrs packages (if present) -3. Compile all dependencies specified in your `Proto.toml` (if present) -4. Output the language bindings into Cargo's `OUT_DIR` - -## Using the generated bindings - -To use the generated rust code within your application code, you can either use -the `buffrs::include!` macro, or use the std version and manually locate the -buffrs module. - -```rust -// Using buffrs -mod proto { buffrs::include!(); } - -// Using std -mod proto { - include!(concat!(env!("OUT_DIR"), "/buffrs.rs")); -} -``` diff --git a/docs/src/integrations/index.md b/docs/src/integrations/index.md index 01413d77..48dd52f3 100644 --- a/docs/src/integrations/index.md +++ b/docs/src/integrations/index.md @@ -7,9 +7,7 @@ problem that engineers are facing](../guide/why-buffrs-exists.md). Ensuring a _works out of the box_ experience through integrating into common build systems is the next one. 
-We are providing integrations for the following build systems at the moment: - -* [Cargo](./cargo.md) +Currently we are providing the following integrations: If you are missing your favorite build system, please create an issue on GitHub to request it – or ideally help us by building or researching parts the diff --git a/flake.nix b/flake.nix index 43a507cc..0090df17 100644 --- a/flake.nix +++ b/flake.nix @@ -6,12 +6,10 @@ url = "github:ipetkov/crane"; inputs.nixpkgs.follows = "nixpkgs"; }; - rust-overlay = { url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; - advisory-db = { url = "github:rustsec/advisory-db"; flake = false; @@ -36,21 +34,19 @@ devTools = [ rustToolchain ]; dependencies = with pkgs; - [ libgit2 libiconv openssl openssl.dev ] - ++ lib.lists.optionals stdenv.isDarwin darwinFrameworks; + [ libiconv ] ++ lib.lists.optionals stdenv.isDarwin darwinFrameworks; nativeBuildInputs = with pkgs; [ pkg-config ] ++ dependencies; buildEnvVars = { NIX_LDFLAGS = [ "-L" "${pkgs.libiconv}/lib" ]; - - LIBGIT2_NO_VENDOR = 1; OPENSSL_NO_VENDOR = 1; }; buffrs = callPackage ./nix/buffrs.nix { inherit crane advisory-db buildEnvVars nativeBuildInputs rustToolchain; + buildInputs = [ rustToolchain ]; }; in { @@ -59,6 +55,9 @@ packages.default = buffrs.package; apps.default = flake-utils.lib.mkApp { drv = buffrs.package; }; + lib.vendorDependencies = + pkgs.callPackage ./nix/cache.nix { buffrs = buffrs.package; }; + devShells.default = pkgs.mkShell ({ inherit nativeBuildInputs; buildInputs = devTools ++ dependencies; diff --git a/nix/cache.nix b/nix/cache.nix new file mode 100644 index 00000000..1d9f2bf9 --- /dev/null +++ b/nix/cache.nix @@ -0,0 +1,41 @@ +{ fetchurl, runCommand, lib, buffrs, symlinkJoin }: + +lockfile: + +let + src = runCommand "vendor-lockfile" { } '' + mkdir -p $out + cp ${lockfile} $out/Proto.lock + ''; + + fileRequirementsJson = + runCommand "buffrs-urls" { buildInputs = [ buffrs ]; } '' + cd ${src} + buffrs lock print-files > 
$out + ''; + + fileRequirements = builtins.fromJSON (builtins.readFile fileRequirementsJson); + + cachePackage = (file: + let + prefix = "sha256:"; + + sha256 = assert lib.strings.hasPrefix prefix file.digest; + lib.strings.removePrefix prefix file.digest; + + tar = fetchurl { + inherit sha256; + url = file.url; + }; + in runCommand "cache-${file.package}" { } '' + mkdir -p $out + cp ${tar} $out/${file.package}.sha256.${sha256}.tgz + ''); + + cache = map cachePackage fileRequirements; +in { + BUFFRS_CACHE = symlinkJoin { + name = "buffrs-cache"; + paths = cache; + }; +} diff --git a/registry/Cargo.toml b/registry/Cargo.toml index 4ad1b31b..5cfebb54 100644 --- a/registry/Cargo.toml +++ b/registry/Cargo.toml @@ -12,11 +12,12 @@ atmosphere = { version = "0.1.3", features = ["postgres"] } sqlx = { version = "0.7.3", features = ["runtime-tokio", "chrono", "postgres", "migrate"] } aws-config = { version = "1.1.2", optional = true } aws-sdk-s3 = { version = "1.12.0", optional = true } -buffrs = { path = "../", version = "0.7.5" } +buffrs = { path = "../", version = "0.8.0" } bytes = "1.5.0" -prost = "0.12.3" -tonic = "0.10" -tonic-types = "0.10" +tonic = "0.11" +prost = "0.12" +prost-types = "0.12" +tonic-types = "0.11" clap = { version = "4.4", features = ["cargo", "derive", "env"] } eyre = "0.6.11" semver = "1.0.21" @@ -31,7 +32,9 @@ serde = "1.0" sha3 = "0.10.8" [build-dependencies] -buffrs = { path = "../", version = "0.7.1" } +buffrs = { path = "../", version = "0.8.0" } +tokio = { version = "1", features = ["full"] } +tonic-build = "0.11" [features] default = [] diff --git a/registry/build.rs b/registry/build.rs index 49555b70..23572192 100644 --- a/registry/build.rs +++ b/registry/build.rs @@ -1,5 +1,50 @@ -// (c) Copyright 2023 Helsing GmbH. All rights reserved. 
+use std::{env, path::Path}; -fn main() { - buffrs::build().expect("failed to compile protos"); +use buffrs::package::PackageStore; + +#[tokio::main] +async fn main() { + let cwd = { + let root = env!("CARGO_MANIFEST_DIR"); + + let mut workspace_dir = Path::new(root); + + while !workspace_dir.ends_with("buffrs") { + workspace_dir = workspace_dir + .parent() + .expect("no path ending in 'buffrs' found in {root}"); + } + + let dir = workspace_dir.join("registry"); + + assert!( + dir.is_dir(), + "current directory not found in {}", + workspace_dir.display() + ); + + dir + }; + + env::set_current_dir(&cwd).unwrap(); + + let store = cwd.join(PackageStore::PROTO_VENDOR_PATH); + + dbg!(&store); + + let protos = PackageStore::open(&cwd) + .await + .unwrap() + .collect(&store, true) + .await; + + let includes = &[store]; + + tonic_build::configure() + .build_client(true) + .build_server(true) + .build_transport(true) + .include_file("buffrs.rs") + .compile(&protos, includes) + .unwrap(); } diff --git a/registry/docker-compose.yml b/registry/docker-compose.yml index 69ee1060..c496bc5c 100644 --- a/registry/docker-compose.yml +++ b/registry/docker-compose.yml @@ -21,15 +21,3 @@ services: MINIO_DOMAIN: localhost ports: - 127.0.0.1:9000:9000 - database-migration: - image: postgres:16 - environment: - POSTGRES_PASSWORD: buffrs - POSTGRES_USER: buffrs - POSTGRES_HOST: database - depends_on: - database: - condition: service_healthy - volumes: - - "./migrations:/migrations" - command: ["/bin/bash", "/migrations/up.sh", "/migrations"] diff --git a/registry/migrations/up.sh b/registry/migrations/up.sh deleted file mode 100644 index 5342d2c9..00000000 --- a/registry/migrations/up.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -e -export PGPASSWORD=$POSTGRES_PASSWORD -echo "Applying migrations..." 
-for file in "$1"/*up.sql -do - echo "Applying migration $file" - psql --username "$POSTGRES_USER" --dbname "buffrs" --port 5432 -h "$POSTGRES_HOST" -f "$file" -done -echo "Migrations applied!" diff --git a/registry/src/proto.rs b/registry/src/proto.rs index b878f65d..4cf48439 100644 --- a/registry/src/proto.rs +++ b/registry/src/proto.rs @@ -18,4 +18,4 @@ #![allow(missing_docs)] -::buffrs::include!(); +::tonic::include_proto!("buffrs"); diff --git a/registry/tests/api/main.rs b/registry/tests/api/main.rs index 817f1d72..2ebfe39b 100644 --- a/registry/tests/api/main.rs +++ b/registry/tests/api/main.rs @@ -101,6 +101,7 @@ pub async fn basic_setup(pool: PgPool) -> RegistryClient { } #[sqlx::test] +#[ignore = "unused"] async fn test_publish_registry(pool: PgPool) { let mut client = basic_setup(pool).await; @@ -136,6 +137,7 @@ async fn test_publish_registry(pool: PgPool) { } #[sqlx::test] +#[ignore = "unused"] async fn test_fetching_versions(pool: PgPool) { let mut client = basic_setup(pool).await; diff --git a/src/cache.rs b/src/cache.rs new file mode 100644 index 00000000..5d0d1016 --- /dev/null +++ b/src/cache.rs @@ -0,0 +1,247 @@ +// Copyright 2023 Helsing GmbH +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use std::{ + path::{Path, PathBuf}, + str::FromStr, +}; + +use bytes::Bytes; +use miette::{miette, Context, IntoDiagnostic}; +use walkdir::WalkDir; + +use crate::{ + lock::{Digest, DigestAlgorithm, FileRequirement, LockedPackage}, + package::{Package, PackageName}, +}; + +/// The environment variable that overrides the default cache location +const CACHE_ENV_VAR: &str = "BUFFRS_CACHE"; +/// The default cache directory name +const CACHE_DIRECTORY: &str = "cache"; + +/// An instance of a cache +pub struct Cache(PathBuf); + +impl Cache { + /// Open the cache + pub async fn open() -> miette::Result { + if let Ok(cache) = std::env::var(CACHE_ENV_VAR).map(PathBuf::from) { + if let false = tokio::fs::try_exists(&cache) + .await + .into_diagnostic() + .wrap_err_with(|| { + miette!( + "internal: failed to verify if cache set by {CACHE_ENV_VAR} ({}) exists", + cache.display() + ) + })? + { + tokio::fs::create_dir_all(&cache) + .await + .into_diagnostic() + .wrap_err_with(|| { + miette!( + "internal: failed to initialize cache dir set by {CACHE_ENV_VAR} ({})", + cache.display() + ) + })? + } + + let path = tokio::fs::canonicalize(cache) + .await + .into_diagnostic() + .wrap_err("failed to canonicalize cache directory")?; + + let cache = Self::new(path).await?; + + return Ok(cache); + } + + let path = crate::home()?.join(CACHE_DIRECTORY); + + let cache = Self::new(path).await?; + + Ok(cache) + } + + /// Create a new buffrs cache at a given location + /// + /// This function is idempotent so multiple invocations of the same path + /// will not modify the filesystem contents.
pub async fn new(path: PathBuf) -> miette::Result { + let exists = tokio::fs::try_exists(&path).await.into_diagnostic()?; + + if !exists { + tokio::fs::create_dir_all(&path).await.ok(); + } + + let cache = Self(path); + + cache.homogenize().await?; + + Ok(cache) + } + + /// Homogenize the cache contents to adhere to the cache specification of buffrs + /// + /// Note: This function removes all malformed contents of the cache in an idempotent manner. + /// Please be cautious when calling this function on arbitrary directories as subcontents may + /// be removed. + pub async fn homogenize(&self) -> miette::Result<()> { + let dir = WalkDir::new(self.path()) + .max_depth(1) + .into_iter() + .filter_map(|e| e.ok()); + + let (dirs, files): (Vec<_>, Vec<_>) = dir.partition(|e| e.path().is_dir()); + + let invalid_dirs = dirs.into_iter().filter(|d| d.path() != self.path()); + + for dir in invalid_dirs { + tracing::debug!("removing invalid cache entry: {}", dir.path().display()); + + tokio::fs::remove_dir_all(dir.path()) + .await + .into_diagnostic() + .wrap_err_with(|| miette!( + "cache contained an unexpected subdirectory ({}) and buffrs was unable to clean it up", + dir.path().display() + ))?; + } + + let invalid_files = files.into_iter().filter(|f| { + let filename = f.path().file_name().unwrap_or_default().to_string_lossy(); + + let parts: Vec<_> = filename.split('.').collect(); + + // invalid – we should have: {name}.{type}.{digest}.{ext} + if parts.len() != 4 { + return true; + } + + // package name part is invalid + if PackageName::new(parts[0]).is_err() { + return true; + } + + // unknown / unsupported digest algorithm + let Ok(alg) = DigestAlgorithm::from_str(parts[1]) else { + return true; + }; + + // invalid digest + if Digest::from_str(&format!("{alg}:{}", parts[2])).is_err() { + return true; + } + + // invalid extension + if parts[3] != "tgz" { + return true; + } + + false + }); + + for file in invalid_files { + tracing::debug!("removing invalid cache entry:
{}", file.path().display()); + + tokio::fs::remove_file(file.path()) + .await + .into_diagnostic() + .wrap_err_with(|| { + miette!( + "cache contained an unexpected file ({}) and buffrs was unable to clean it up", + file.path().display() + ) + })?; + } + + Ok(()) + } + + /// Resolve a file requirement from the cache + pub async fn get(&self, file: FileRequirement) -> miette::Result> { + let entry: Entry = file.into(); + + let file = self.path().join(entry.filename()); + + let tgz = tokio::fs::read(&file) + .await + .into_diagnostic() + .map(bytes::Bytes::from); + + if let Ok(tgz) = tgz { + let pkg = Package::parse(tgz)?; + + return Ok(Some(pkg)); + } + + Ok(None) + } + + /// Put a locked package in the cache + pub async fn put(&self, package: &LockedPackage, bytes: Bytes) -> miette::Result<()> { + let entry: Entry = FileRequirement::from(package).into(); + + let file = self.path().join(entry.filename()); + + tokio::fs::write(&file, bytes.as_ref()) + .await + .into_diagnostic() + .wrap_err(miette!( + "failed to put package {} in the cache", + package.name + ))?; + + Ok(()) + } + + /// The directory in the filesystem used by this cache + pub fn path(&self) -> &Path { + self.0.as_path() + } +} + +/// A cache locator to store or retrieve a package +/// +/// This follows the naming scheme of {package-name}-{digest-type}-{digest}.tgz +pub struct Entry(PathBuf); + +impl Entry { + /// The filename of the cache entry + pub fn filename(&self) -> &Path { + self.0.as_path() + } +} + +impl From for Entry { + fn from(req: FileRequirement) -> Entry { + Self::from(&req) + } +} + +impl From<&FileRequirement> for Entry { + fn from(req: &FileRequirement) -> Entry { + Self( + format!( + "{}.{}.{}.tgz", + req.package, + req.digest.algorithm(), + hex::encode(req.digest.as_bytes()) + ) + .into(), + ) + } +} diff --git a/src/command.rs b/src/command.rs index c3fcb5f3..3c6398eb 100644 --- a/src/command.rs +++ b/src/command.rs @@ -13,6 +13,7 @@ // limitations under the License. 
use crate::{ + cache::Cache, credentials::Credentials, lock::{LockedPackage, Lockfile}, manifest::{Dependency, Manifest, PackageManifest, MANIFEST_FILE}, @@ -21,11 +22,6 @@ use crate::{ resolver::DependencyGraph, }; -#[cfg(feature = "build")] -use crate::generator::{Generator, Language}; -#[cfg(feature = "build")] -use std::path::PathBuf; - use async_recursion::async_recursion; use miette::{bail, ensure, miette, Context as _, IntoDiagnostic}; use semver::{Version, VersionReq}; @@ -167,10 +163,22 @@ pub async fn remove(package: PackageName) -> miette::Result<()> { } /// Packages the api and writes it to the filesystem -pub async fn package(directory: impl AsRef, dry_run: bool) -> miette::Result<()> { - let manifest = Manifest::read().await?; +pub async fn package( + directory: impl AsRef, + dry_run: bool, + version: Option, +) -> miette::Result<()> { + let mut manifest = Manifest::read().await?; let store = PackageStore::current().await?; + if let Some(version) = version { + if let Some(ref mut package) = manifest.package { + tracing::info!(":: modified version in published manifest to {version}"); + + package.version = version; + } + } + if let Some(ref pkg) = manifest.package { store.populate(pkg).await?; } @@ -195,28 +203,42 @@ pub async fn package(directory: impl AsRef, dry_run: bool) -> miette::Resu )) } -#[cfg(feature = "git")] -async fn git_statuses() -> miette::Result> { - let output = tokio::process::Command::new("git") - .arg("status") - .arg("--porcelain") - .output() - .await; - - let output = match output { - Ok(output) => output, - Err(e) => { - tracing::error!("failed to run `git status`: {}", e); +/// Publishes the api package to the registry +pub async fn publish( + registry: RegistryUri, + repository: String, + #[cfg(feature = "git")] allow_dirty: bool, + dry_run: bool, + version: Option, +) -> miette::Result<()> { + #[cfg(feature = "git")] + async fn git_statuses() -> miette::Result> { + use std::process::Stdio; + + let output = 
tokio::process::Command::new("git") + .arg("status") + .arg("--porcelain") + .stderr(Stdio::null()) + .output() + .await; + + let output = match output { + Ok(output) => output, + Err(_) => { + return Ok(Vec::new()); + } + }; + + if !output.status.success() { return Ok(Vec::new()); } - }; - let statuses = if output.status.success() { let stdout = String::from_utf8(output.stdout) .into_diagnostic() .wrap_err(miette!( "invalid utf-8 character in the output of `git status`" ))?; + let lines: Option> = stdout .lines() .map(|line| { @@ -225,34 +247,11 @@ async fn git_statuses() -> miette::Result> { }) .collect(); - if let Some(statuses) = lines { - statuses - } else { - tracing::warn!("failed to parse `git status` output: {}", stdout); - Vec::new() - } - } else { - let stderr = String::from_utf8(output.stderr) - .into_diagnostic() - .wrap_err(miette!( - "invalid utf-8 character in the error output of `git status`" - ))?; - tracing::error!("`git status` returned an error: {}", stderr); - Vec::new() - }; - Ok(statuses) -} + Ok(lines.unwrap_or_default()) + } -/// Publishes the api package to the registry -pub async fn publish( - registry: RegistryUri, - repository: String, - #[cfg(feature = "git")] allow_dirty: bool, - dry_run: bool, -) -> miette::Result<()> { #[cfg(feature = "git")] - { - let statuses = git_statuses().await?; + if let Ok(statuses) = git_statuses().await { if !allow_dirty && !statuses.is_empty() { tracing::error!("{} files in the working directory contain changes that were not yet committed into git:\n", statuses.len()); @@ -264,11 +263,19 @@ pub async fn publish( } } - let manifest = Manifest::read().await?; + let mut manifest = Manifest::read().await?; let credentials = Credentials::load().await?; let store = PackageStore::current().await?; let artifactory = Artifactory::new(registry, &credentials)?; + if let Some(version) = version { + if let Some(ref mut package) = manifest.package { + tracing::info!(":: modified version in published manifest to 
{version}"); + + package.version = version; + } + } + if let Some(ref pkg) = manifest.package { store.populate(pkg).await?; } @@ -289,6 +296,7 @@ pub async fn install() -> miette::Result<()> { let lockfile = Lockfile::read_or_default().await?; let store = PackageStore::current().await?; let credentials = Credentials::load().await?; + let cache = Cache::open().await?; store.clear().await?; @@ -299,7 +307,7 @@ pub async fn install() -> miette::Result<()> { } let dependency_graph = - DependencyGraph::from_manifest(&manifest, &lockfile, &credentials.into()) + DependencyGraph::from_manifest(&manifest, &lockfile, &credentials.into(), &cache) .await .wrap_err(miette!("dependency resolution failed"))?; @@ -428,15 +436,6 @@ pub async fn lint() -> miette::Result<()> { Ok(()) } -/// Generate bindings for a given language -#[cfg(feature = "build")] -pub async fn generate(language: Language, out_dir: PathBuf) -> miette::Result<()> { - Generator::Protoc { language, out_dir } - .generate() - .await - .wrap_err(miette!("failed to generate {language} bindings")) -} - /// Logs you in for a registry pub async fn login(registry: RegistryUri) -> miette::Result<()> { let mut credentials = Credentials::load().await?; @@ -475,6 +474,26 @@ pub async fn logout(registry: RegistryUri) -> miette::Result<()> { credentials.write().await } +/// Commands on the lockfile +pub mod lock { + use super::*; + use crate::lock::FileRequirement; + + /// Prints the file requirements serialized as JSON + pub async fn print_files() -> miette::Result<()> { + let lock = Lockfile::read().await?; + + let requirements: Vec = lock.into(); + + // hint: always ok, as per serde_json doc + if let Ok(json) = serde_json::to_string_pretty(&requirements) { + println!("{json}"); + } + + Ok(()) + } +} + #[cfg(test)] mod tests { use super::DependencyLocator; diff --git a/src/credentials.rs b/src/credentials.rs index 372f1181..a0f79a9c 100644 --- a/src/credentials.rs +++ b/src/credentials.rs @@ -12,10 +12,9 @@ // See the 
License for the specific language governing permissions and // limitations under the License. -use miette::{miette, Context, Diagnostic, IntoDiagnostic}; +use miette::{Context, IntoDiagnostic}; use serde::{Deserialize, Serialize}; -use std::{collections::HashMap, env, io::ErrorKind, path::PathBuf}; -use thiserror::Error; +use std::{collections::HashMap, io::ErrorKind, path::PathBuf}; use tokio::fs; use crate::{ @@ -24,8 +23,6 @@ use crate::{ ManagedFile, }; -/// Global configuration directory for `buffrs` -pub const BUFFRS_HOME: &str = ".buffrs"; /// Filename of the credential store pub const CREDENTIALS_FILE: &str = "credentials.toml"; @@ -38,26 +35,14 @@ pub struct Credentials { pub registry_tokens: HashMap, } -const BUFFRS_HOME_VAR: &str = "BUFFRS_HOME"; - -#[derive(Error, Diagnostic, Debug)] -#[error("could not determine credentials location")] -struct LocateError(#[diagnostic_source] miette::Report); - -fn location() -> Result { - env::var(BUFFRS_HOME_VAR) - .map(PathBuf::from) - .or_else(|_| { - home::home_dir() - .ok_or_else(|| miette!("{BUFFRS_HOME_VAR} is not set and the user's home folder could not be determined")) - }) - .map(|home| home.join(BUFFRS_HOME).join(CREDENTIALS_FILE)).map_err(LocateError) -} - impl Credentials { + fn location() -> miette::Result { + Ok(crate::home().into_diagnostic()?.join(CREDENTIALS_FILE)) + } + /// Checks if the credentials exists pub async fn exists() -> miette::Result { - fs::try_exists(location().into_diagnostic()?) + fs::try_exists(Self::location()?) .await .into_diagnostic() .wrap_err(FileExistsError(CREDENTIALS_FILE)) @@ -65,10 +50,8 @@ impl Credentials { /// Reads the credentials from the file system pub async fn read() -> miette::Result> { - let location = location().into_diagnostic()?; - // if the file does not exist, we don't need to treat it as an error. 
- match fs::read_to_string(&location).await { + match fs::read_to_string(Self::location()?).await { Ok(contents) => { let raw: RawCredentialCollection = toml::from_str(&contents) .into_diagnostic() @@ -84,7 +67,7 @@ impl Credentials { /// Writes the credentials to the file system pub async fn write(&self) -> miette::Result<()> { - let location = location()?; + let location = Self::location()?; if let Some(parent) = location.parent() { // if directory already exists, error is returned but that is fine @@ -107,6 +90,7 @@ impl Credentials { /// Loads the credentials from the file system, returning default credentials if /// they do not exist. + /// /// Note, this should not create files in the user's home directory, as we should /// not be performing global stateful operations in absence of a user instruction. pub async fn load() -> miette::Result { diff --git a/src/generator.rs b/src/generator.rs deleted file mode 100644 index 1caf79b9..00000000 --- a/src/generator.rs +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2023 Helsing GmbH -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use std::{fmt, path::PathBuf}; - -use miette::{ensure, miette, Context, IntoDiagnostic}; -use serde::{Deserialize, Serialize}; -use tracing::{debug, info}; - -use crate::{manifest::Manifest, package::PackageStore}; - -/// The language used for code generation -#[derive( - Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, clap::ValueEnum, -)] -#[serde(rename_all = "kebab-case")] -#[allow(missing_docs)] // trivial enum -pub enum Language { - Python, -} - -impl fmt::Display for Language { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", serde_typename::to_str(&self).unwrap_or("unknown")) - } -} - -/// Backend used to generate code bindings -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub enum Generator { - /// The tonic + prost stack - Tonic, - /// The official `protoc` protobuf compiler - Protoc { - /// Target language for code generation - language: Language, - /// Target directory for the generated source files - out_dir: PathBuf, - }, -} - -impl Generator { - /// Tonic include file name - pub const TONIC_INCLUDE_FILE: &'static str = "buffrs.rs"; - - /// Run the generator for a dependency and output files at the provided path - pub async fn run(&self) -> miette::Result<()> { - let store = PackageStore::current().await?; - let manifest = Manifest::read().await?; - - let mut proto_files = vec![]; - - if let Some(ref pkg) = manifest.package { - store.populate(pkg).await?; - - proto_files.extend(store.populated_files(pkg).await); - } else { - proto_files.extend(store.collect(&store.proto_vendor_path(), true).await); - } - - let includes = &[store.proto_vendor_path()]; - - match self { - Generator::Tonic => { - tonic_build::configure() - .build_client(true) - .build_server(true) - .build_transport(true) - .include_file(Self::TONIC_INCLUDE_FILE) - .compile(&proto_files, includes) - .into_diagnostic()?; - } - Generator::Protoc { language, out_dir } => { - let mut protoc = protoc::ProtocLangOut::new(); 
- - match language { - Language::Python => { - protoc.lang("python").out_dir(out_dir); - } - } - - // Setting proto path causes protoc to replace occurrences of this string appearing in the - // path of the generated path with that provided by output path - // e.g. if input proto path is proto/vendor/units/units.proto and the proto path is 'proto' - // and the --python_out is 'proto/build/gen' then the file will be output to - // proto/build/gen/vendor/units/units.py - // We need both of these if we want "vendor" to be removed, and it has to come first - protoc.includes(["proto/vendor", "proto"]); - - protoc.inputs(&proto_files); - - debug!(":: running protoc"); - - protoc.run().into_diagnostic()?; - - info!(":: {language} code generated successfully"); - } - } - - Ok(()) - } -} - -impl Generator { - /// Execute code generation with pre-configured parameters - pub async fn generate(&self) -> miette::Result<()> { - let manifest = Manifest::read().await?; - let store = PackageStore::current().await?; - - let mut protos = vec![]; - - if let Some(ref pkg) = manifest.package { - store.populate(pkg).await?; - - protos.extend(store.populated_files(pkg).await); - } else { - protos.extend(store.collect(&store.proto_vendor_path(), true).await); - } - - info!(":: initializing code generator"); - - ensure!( - manifest.package.is_some() || !manifest.dependencies.is_empty() || !protos.is_empty(), - "either a compilable package (library or api) or at least one dependency/proto file is needed to generate code bindings." 
- ); - - self.run() - .await - .wrap_err(miette!("failed to generate bindings"))?; - - info!( - ":: compiled {}[{}]", - manifest - .package - .as_ref() - .map(|p| format!("{} ", p.name)) - .unwrap_or_else(|| "".to_string()), - store.proto_path().display() - ); - - for dependency in manifest.dependencies { - info!( - ":: compiled {} [{}]", - dependency.package, - store.locate(&dependency.package).display() - ); - } - - Ok(()) - } -} diff --git a/src/lib.rs b/src/lib.rs index 4f70868f..9db2fa12 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -15,15 +15,18 @@ #![warn(missing_docs)] #![doc = include_str!("../README.md")] +use miette::Diagnostic; +use std::{env, path::PathBuf}; +use thiserror::Error; + +/// Caching implementation +pub mod cache; /// CLI command implementations pub mod command; /// Credential management pub mod credentials; /// Common error types pub mod errors; -/// Code generator -#[cfg(feature = "build")] -pub mod generator; /// Lockfile implementation pub mod lock; /// Manifest format and IO @@ -38,36 +41,24 @@ pub mod resolver; #[cfg(feature = "validation")] pub mod validation; -/// Cargo build integration for buffrs -/// -/// Important: Only use this inside of cargo build scripts! -#[cfg(feature = "build")] -#[tokio::main(flavor = "current_thread")] -pub async fn build() -> miette::Result<()> { - println!( - "cargo:rerun-if-changed={}", - package::PackageStore::PROTO_PATH - ); - - command::install().await?; +/// Managed directory for `buffrs` +pub const BUFFRS_HOME: &str = ".buffrs"; - generator::Generator::Tonic.generate().await?; +pub(crate) const BUFFRS_HOME_VAR: &str = "BUFFRS_HOME"; - Ok(()) -} +#[derive(Error, Diagnostic, Debug)] +#[error("could not determine buffrs home location")] +struct HomeError(#[diagnostic_source] miette::Report); -/// Include generated rust language bindings for buffrs. -/// -/// ```rust,ignore -/// mod protos { -/// buffrs::include!(); -/// } -/// ``` -#[macro_export] -macro_rules! 
include { - () => { - ::std::include!(concat!(env!("OUT_DIR"), "/buffrs.rs",)); - }; +fn home() -> Result { + env::var(BUFFRS_HOME_VAR) + .map(PathBuf::from) + .or_else(|_| { + home::home_dir() + .ok_or_else(|| miette::miette!("{BUFFRS_HOME_VAR} is not set and the user's home folder could not be determined")) + }) + .map(|home| home.join(BUFFRS_HOME)) + .map_err(HomeError) } #[derive(Debug)] diff --git a/src/lock.rs b/src/lock.rs index 4ab1f8bd..90ad9153 100644 --- a/src/lock.rs +++ b/src/lock.rs @@ -19,6 +19,7 @@ use semver::Version; use serde::{Deserialize, Serialize}; use thiserror::Error; use tokio::fs; +use url::Url; use crate::{ errors::{DeserializationError, FileExistsError, FileNotFound, SerializationError, WriteError}, @@ -36,7 +37,7 @@ pub const LOCKFILE: &str = "Proto.lock"; /// Captures immutable metadata about a given package /// /// It is used to ensure that future installations will use the exact same dependencies. -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct LockedPackage { /// The name of the package pub name: PackageName, @@ -58,8 +59,6 @@ pub struct LockedPackage { impl LockedPackage { /// Captures the source, version and checksum of a Package for use in reproducible installs - /// - /// Note that despite returning a Result this function never fails pub fn lock( package: &Package, registry: RegistryUri, @@ -220,3 +219,77 @@ impl FromIterator for Lockfile { } } } + +impl From for Vec { + fn from(lock: Lockfile) -> Self { + lock.packages.values().map(FileRequirement::from).collect() + } +} + +/// A requirement from a lockfile on a specific file being available in order to build the +/// overall graph. It's expected that when a file is downloaded, it's made available to buffrs +/// by setting the filename to the digest in whatever download directory. 
+#[derive(Serialize, Clone, PartialEq, Eq)] +pub struct FileRequirement { + pub(crate) package: PackageName, + pub(crate) url: Url, + pub(crate) digest: Digest, +} + +impl FileRequirement { + /// URL where the file can be located. + pub fn url(&self) -> &Url { + &self.url + } + + /// Construct new file requirement. + pub fn new( + url: &RegistryUri, + repository: &String, + name: &PackageName, + version: &Version, + digest: &Digest, + ) -> Self { + let mut url = url.clone(); + let new_path = format!( + "{}/{}/{}/{}-{}.tgz", + url.path(), + repository, + name, + name, + version + ); + + url.set_path(&new_path); + + Self { + package: name.to_owned(), + url: url.into(), + digest: digest.clone(), + } + } +} + +impl From for FileRequirement { + fn from(package: LockedPackage) -> Self { + Self::new( + &package.registry, + &package.repository, + &package.name, + &package.version, + &package.digest, + ) + } +} + +impl From<&LockedPackage> for FileRequirement { + fn from(package: &LockedPackage) -> Self { + Self::new( + &package.registry, + &package.repository, + &package.name, + &package.version, + &package.digest, + ) + } +} diff --git a/src/main.rs b/src/main.rs index 01a0919d..2c792d5f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -12,16 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::path::PathBuf; - use buffrs::command; -use buffrs::generator::Language; use buffrs::manifest::Manifest; use buffrs::package::{PackageName, PackageStore}; use buffrs::registry::RegistryUri; use buffrs::{manifest::MANIFEST_FILE, package::PackageType}; use clap::{Parser, Subcommand}; -use miette::{miette, IntoDiagnostic, WrapErr}; +use miette::{miette, WrapErr}; +use semver::Version; #[derive(Parser)] #[command(author, version, about, long_about)] @@ -76,6 +74,11 @@ enum Command { /// Generate package but do not write it to filesystem #[clap(long)] dry_run: bool, + /// Override the version from the manifest + /// + /// Note: This overrides the version in the manifest. + #[clap(long)] + set_version: Option, }, /// Packages and uploads this api to the registry @@ -92,6 +95,11 @@ enum Command { /// Abort right before uploading the release to the registry #[clap(long)] dry_run: bool, + /// Override the version from the manifest + /// + /// Note: This overrides the version in the manifest. + #[clap(long)] + set_version: Option, }, /// Installs dependencies @@ -103,18 +111,6 @@ enum Command { #[clap(alias = "ls")] List, - /// Generate code from installed buffrs packages - #[clap(alias = "gen")] - Generate { - /// Language used for code generation - #[clap(long = "lang")] - #[arg(value_enum)] - language: Language, - /// Directory where generated code should be created - #[clap(long = "out-dir")] - out_dir: PathBuf, - }, - /// Logs you in for a registry Login { /// Artifactory url (e.g. https:///artifactory) @@ -127,6 +123,20 @@ enum Command { #[clap(long)] registry: RegistryUri, }, + + /// Lockfile related commands + Lock { + #[command(subcommand)] + command: LockfileCommand, + }, +} + +#[derive(Subcommand)] +enum LockfileCommand { + /// Prints the file requirements derived from the lockfile serialized as JSON + /// + /// This is useful for consumption of the lockfile in other programs. 
+ PrintFiles, } #[tokio::main(flavor = "current_thread")] @@ -151,9 +161,19 @@ async fn main() -> miette::Result<()> { None }; - let package: PackageName = manifest - .and_then(|m| m.package.map(|p| p.name)) - .unwrap_or(PackageName::new("current").into_diagnostic()?); + let package = { + let cwd = std::env::current_dir().unwrap(); + + let name = cwd + .file_name() + .ok_or_else(|| miette!("failed to locate current directory"))? + .to_str() + .ok_or_else(|| miette!("internal error"))?; + + manifest + .and_then(|m| m.package.map(|p| p.name.to_string())) + .unwrap_or_else(|| name.to_string()) + }; match cli.command { Command::Init { lib, api, package } => { @@ -192,7 +212,8 @@ async fn main() -> miette::Result<()> { Command::Package { output_directory, dry_run, - } => command::package(output_directory, dry_run) + set_version, + } => command::package(output_directory, dry_run, set_version) .await .wrap_err(miette!( "failed to export `{package}` into the buffrs package format" @@ -202,11 +223,13 @@ async fn main() -> miette::Result<()> { repository, allow_dirty, dry_run, + set_version, } => command::publish( registry.to_owned(), repository.to_owned(), allow_dirty, dry_run, + set_version, ) .await .wrap_err(miette!( @@ -225,8 +248,10 @@ async fn main() -> miette::Result<()> { Command::List => command::list().await.wrap_err(miette!( "failed to list installed protobuf files for `{package}`" )), - Command::Generate { language, out_dir } => command::generate(language, out_dir) - .await - .wrap_err(miette!("failed to generate {language} language bindings")), + Command::Lock { command } => match command { + LockfileCommand::PrintFiles => command::lock::print_files().await.wrap_err(miette!( + "failed to print locked file requirements of `{package}`" + )), + }, } } diff --git a/src/manifest.rs b/src/manifest.rs index 652ca60e..ca724fbf 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -45,6 +45,8 @@ pub enum Edition { /// at any time. 
Users are responsible for consulting documentation and /// help channels if errors occur. Canary, + /// The canary edition used by buffrs 0.7.x + Canary07, /// Unknown edition of manifests /// /// This is unrecommended as breaking changes could be introduced due to being @@ -109,6 +111,7 @@ mod serializer { { match self { Self::Canary => serializer.serialize_str(CANARY_EDITION), + Self::Canary07 => serializer.serialize_str("0.7"), Self::Unknown => serializer.serialize_str("unknown"), } } @@ -172,6 +175,7 @@ mod deserializer { { match value { c if c == CANARY_EDITION => Ok(Edition::Canary), + "0.7" => Ok(Edition::Canary07), _ => Ok(Edition::Unknown), } } @@ -226,7 +230,7 @@ mod deserializer { let edition = serde_typename::from_str(&edition); match edition { - Ok(Edition::Canary) => Ok(RawManifest::Canary { + Ok(Edition::Canary | Edition::Canary07) => Ok(RawManifest::Canary { package, dependencies, }), @@ -251,7 +255,7 @@ impl From for RawManifest { .collect(); match manifest.edition { - Edition::Canary => RawManifest::Canary { + Edition::Canary | Edition::Canary07 => RawManifest::Canary { package: manifest.package, dependencies, }, diff --git a/src/package/compressed.rs b/src/package/compressed.rs index 7f4e856a..e7a23b3e 100644 --- a/src/package/compressed.rs +++ b/src/package/compressed.rs @@ -151,7 +151,7 @@ impl Package { } /// Load a package from a precompressed archive. 
- fn parse(tgz: Bytes) -> miette::Result { + pub(crate) fn parse(tgz: Bytes) -> miette::Result { let mut tar = Vec::new(); let mut gz = flate2::read::GzDecoder::new(tgz.clone().reader()); diff --git a/src/resolver.rs b/src/resolver.rs index 5932c9c3..1c37fe10 100644 --- a/src/resolver.rs +++ b/src/resolver.rs @@ -6,6 +6,7 @@ use semver::VersionReq; use thiserror::Error; use crate::{ + cache::Cache, credentials::Credentials, lock::Lockfile, manifest::{Dependency, Manifest}, @@ -53,6 +54,7 @@ impl DependencyGraph { manifest: &Manifest, lockfile: &Lockfile, credentials: &Arc, + cache: &Cache, ) -> miette::Result { let name = manifest .package @@ -69,6 +71,7 @@ impl DependencyGraph { true, lockfile, credentials, + cache, &mut entries, ) .await?; @@ -84,6 +87,7 @@ impl DependencyGraph { is_root: bool, lockfile: &Lockfile, credentials: &Arc, + cache: &Cache, entries: &mut HashMap, ) -> miette::Result<()> { let version_req = dependency.manifest.version.clone(); @@ -91,17 +95,17 @@ impl DependencyGraph { ensure!( version_req.matches(entry.package.version()), "a dependency of your project requires {}@{} which collides with {}@{} required by {:?}", - dependency.package, - dependency.manifest.version, - entry.dependants[0].name.clone(), - dependency.manifest.version, - entry.package.manifest.package.as_ref().map(|p| &p.version) + dependency.package, + dependency.manifest.version, + entry.dependants[0].name.clone(), + dependency.manifest.version, + entry.package.manifest.package.as_ref().map(|p| &p.version) ); entry.dependants.push(Dependant { name, version_req }); } else { let dependency_pkg = - Self::resolve(dependency.clone(), is_root, lockfile, credentials).await?; + Self::resolve(dependency.clone(), is_root, lockfile, credentials, cache).await?; let dependency_name = dependency_pkg.name().clone(); let sub_dependencies = dependency_pkg.manifest.dependencies.clone(); @@ -128,6 +132,7 @@ impl DependencyGraph { false, lockfile, credentials, + cache, entries, ) .await?; @@ 
-142,6 +147,7 @@ impl DependencyGraph { is_root: bool, lockfile: &Lockfile, credentials: &Arc, + cache: &Cache, ) -> miette::Result { if let Some(local_locked) = lockfile.get(&dependency.package) { ensure!( @@ -160,6 +166,12 @@ impl DependencyGraph { local_locked.registry, ); + if let Some(cached) = cache.get(local_locked.into()).await? { + local_locked.validate(&cached)?; + + return Ok(cached); + } + let registry = Artifactory::new(dependency.manifest.registry.clone(), credentials) .wrap_err(DownloadError { name: dependency.package.clone(), @@ -174,7 +186,7 @@ impl DependencyGraph { version: dependency.manifest.version, })?; - local_locked.validate(&package)?; + cache.put(local_locked, package.tgz.clone()).await.ok(); Ok(package) } else { diff --git a/tests/cmd/add/out/Proto.toml b/tests/cmd/add/out/Proto.toml index fb4c7710..19d40a5d 100644 --- a/tests/cmd/add/out/Proto.toml +++ b/tests/cmd/add/out/Proto.toml @@ -1,4 +1,4 @@ -edition = "0.7" +edition = "0.8" [package] type = "lib" diff --git a/tests/cmd/init/api/out/Proto.toml b/tests/cmd/init/api/out/Proto.toml index 6049af60..af5f1d2c 100644 --- a/tests/cmd/init/api/out/Proto.toml +++ b/tests/cmd/init/api/out/Proto.toml @@ -1,4 +1,4 @@ -edition = "0.7" +edition = "0.8" [package] type = "api" diff --git a/tests/cmd/init/default/out/Proto.toml b/tests/cmd/init/default/out/Proto.toml index 9a48947a..c5835a70 100644 --- a/tests/cmd/init/default/out/Proto.toml +++ b/tests/cmd/init/default/out/Proto.toml @@ -1,3 +1,3 @@ -edition = "0.7" +edition = "0.8" [dependencies] diff --git a/tests/cmd/init/lib/out/Proto.toml b/tests/cmd/init/lib/out/Proto.toml index eb7c2464..79c3f8d8 100644 --- a/tests/cmd/init/lib/out/Proto.toml +++ b/tests/cmd/init/lib/out/Proto.toml @@ -1,4 +1,4 @@ -edition = "0.7" +edition = "0.8" [package] type = "lib" diff --git a/tests/cmd/package/out/lib-0.0.1.tgz b/tests/cmd/package/out/lib-0.0.1.tgz index 1847abf8..35d81f84 100644 Binary files a/tests/cmd/package/out/lib-0.0.1.tgz and 
b/tests/cmd/package/out/lib-0.0.1.tgz differ diff --git a/tests/cmd/publish/stdout.log b/tests/cmd/publish/stdout.log index 9694c4e3..d078a715 100644 --- a/tests/cmd/publish/stdout.log +++ b/tests/cmd/publish/stdout.log @@ -1,4 +1,2 @@ -`git status` returned an error: fatal: not a git repository (or any of the parent directories): .git - :: packaged lib@0.0.1 :: published my-repository/lib@0.0.1 diff --git a/tests/cmd/tuto/in/Cargo.toml b/tests/cmd/tuto/in/Cargo.toml index 7b20c24d..a10d1437 100644 --- a/tests/cmd/tuto/in/Cargo.toml +++ b/tests/cmd/tuto/in/Cargo.toml @@ -4,6 +4,10 @@ version = "0.1.0" edition = "2021" [dependencies] -tonic = "0.9" -prost = "0.11" -prost-types = "0.11" +tonic = "0.11" +prost = "0.12" +prost-types = "0.12" + +[build-dependencies] +tokio = { version = "1", features = ["full"] } +tonic-build = "0.11" diff --git a/tests/cmd/tuto/in/build.rs b/tests/cmd/tuto/in/build.rs index 39afcb12..37fc4b1f 100644 --- a/tests/cmd/tuto/in/build.rs +++ b/tests/cmd/tuto/in/build.rs @@ -1,3 +1,24 @@ -fn main() { - buffrs::build().unwrap(); +use std::path::Path; + +use buffrs::package::PackageStore; + +#[tokio::main] +async fn main() { + let store = Path::new(PackageStore::PROTO_VENDOR_PATH); + + let protos = PackageStore::current() + .await + .unwrap() + .collect(store, true) + .await; + + let includes = &[store]; + + tonic_build::configure() + .build_client(true) + .build_server(true) + .build_transport(true) + .include_file("buffrs.rs") + .compile(&protos, includes) + .unwrap(); } diff --git a/tests/cmd/tuto/in/main.rs b/tests/cmd/tuto/in/main.rs index 1fa54a01..4687a154 100644 --- a/tests/cmd/tuto/in/main.rs +++ b/tests/cmd/tuto/in/main.rs @@ -1,5 +1,5 @@ mod protos { - buffrs::include!(); + tonic::include_proto!("buffrs"); } struct Sensor; diff --git a/tests/cmd/tuto/mod.rs b/tests/cmd/tuto/mod.rs index 76885622..3cb84b7a 100644 --- a/tests/cmd/tuto/mod.rs +++ b/tests/cmd/tuto/mod.rs @@ -158,7 +158,7 @@ fn fixture() { "add", "buffrs", "--build", - 
"--features=build", + "--no-default-features", "--path", git_root ]) diff --git a/tests/data/projects/lib/Proto.toml b/tests/data/projects/lib/Proto.toml index e22dc8ca..f9216510 100644 --- a/tests/data/projects/lib/Proto.toml +++ b/tests/data/projects/lib/Proto.toml @@ -1,4 +1,4 @@ -edition = "0.7" +edition = "0.8" [package] type = "lib"