Skip to content

Commit

Permalink
Update TOMLs/version 0.0.1 before cargo release
Browse files · Browse the repository at this point in the history
  • Loading branch information
philpax committed May 4, 2023
1 parent 93f50bd commit 0fb17b9
Show file tree
Hide file tree
Showing 17 changed files with 82 additions and 57 deletions.
25 changes: 10 additions & 15 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ members = [
resolver = "2"

[workspace.package]
version = "0.1.0"
repository = "https://github.com/rustformers/llm"
license = "MIT OR Apache-2.0"

[workspace.dependencies]
bytemuck = "1.13.1"
Expand Down
3 changes: 3 additions & 0 deletions binaries/generate-ggml-bindings/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,8 @@ version = "0.1.0"
edition = "2021"
publish = false

[package.metadata.release]
release = false

[dependencies]
bindgen = "0.65.1"
6 changes: 4 additions & 2 deletions binaries/llm-cli/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
[package]
edition = "2021"
name = "llm-cli"
version = { workspace = true }
repository = "https://github.com/rustformers/llm"
version = "0.0.1"
repository = { workspace = true }
license = { workspace = true }
description = "A CLI for running inference on supported Large Language Models. Powered by the `llm` library."

[[bin]]
name = "llm"
Expand Down
7 changes: 5 additions & 2 deletions crates/ggml/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
[package]
name = "ggml"
version = { workspace = true }
version = "0.0.1"
repository = { workspace = true }
edition = "2021"
description = "Semi-idiomatic Rust bindings for the ggml library (from `ggml-sys`)."
license = "MIT"

[dependencies]
thiserror = { workspace = true }
ggml-sys = { path = "sys" }
ggml-sys = { path = "sys", version = "0.0.1" }

[dev-dependencies]
rand = { workspace = true }
5 changes: 4 additions & 1 deletion crates/ggml/sys/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
[package]
name = "ggml-sys"
version = { workspace = true }
version = "0.0.1"
repository = { workspace = true }
edition = "2021"
description = "Raw bindings (i.e. bindgen output) for the ggml library."
license = "MIT"

[build-dependencies]
cc = "^1.0"
7 changes: 5 additions & 2 deletions crates/llm-base/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
[package]
name = "llm-base"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "The base for `llm`; provides common structure for model implementations. Not intended for use by end-users."
edition = "2021"
rust-version = "1.65"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ggml = { path = "../ggml" }
ggml = { path = "../ggml", version = "0.0.1" }

bytemuck = { workspace = true }
rand = { workspace = true }
Expand Down
17 changes: 10 additions & 7 deletions crates/llm/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,15 +1,18 @@
[package]
name = "llm"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "A Rust ecosystem of libraries for running inference on large language models, inspired by llama.cpp."
edition = "2021"

[dependencies]
llm-base = { path = "../llm-base" }
llm-llama = { path = "../models/llama", features = ["convert"], optional = true }
llm-gpt2 = { path = "../models/gpt2", optional = true }
llm-gptj = { path = "../models/gptj", optional = true }
llm-bloom = { path = "../models/bloom", optional = true }
llm-neox = { path = "../models/neox", optional = true }
llm-base = { path = "../llm-base", version = "0.0.1" }
llm-llama = { path = "../models/llama", features = ["convert"], optional = true, version = "0.0.1" }
llm-gpt2 = { path = "../models/gpt2", optional = true, version = "0.0.1" }
llm-gptj = { path = "../models/gptj", optional = true, version = "0.0.1" }
llm-bloom = { path = "../models/bloom", optional = true, version = "0.0.1" }
llm-neox = { path = "../models/neox", optional = true, version = "0.0.1" }

[dev-dependencies]
rand = { workspace = true }
Expand Down
8 changes: 5 additions & 3 deletions crates/models/bloom/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
[package]
name = "llm-bloom"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of BLOOM (BigScience Large Open-science Open-access Multilingual Language Model) for the `llm` ecosystem."
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ggml = { path = "../../ggml" }
llm-base = { path = "../../llm-base" }
llm-base = { path = "../../llm-base", version = "0.0.1" }

bytemuck = { workspace = true }
6 changes: 3 additions & 3 deletions crates/models/bloom/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
use std::path::Path;

use llm_base::{
model::common, util, EvaluateOutputRequest, FileType, InferenceParameters, InferenceSession,
InferenceSessionParameters, InferenceWithPromptParameters, KnownModel, LoadError, LoadProgress,
Mmap, ModelParameters, TokenId, Vocabulary,
ggml, model::common, util, EvaluateOutputRequest, FileType, InferenceParameters,
InferenceSession, InferenceSessionParameters, InferenceWithPromptParameters, KnownModel,
LoadError, LoadProgress, Mmap, ModelParameters, TokenId, Vocabulary,
};

/// The BLOOM model.
Expand Down
8 changes: 5 additions & 3 deletions crates/models/gpt2/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
[package]
name = "llm-gpt2"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of GPT-2 for the `llm` ecosystem."
edition = "2021"

[dependencies]
ggml = { path = "../../ggml" }
llm-base = { path = "../../llm-base" }
llm-base = { path = "../../llm-base", version = "0.0.1" }

bytemuck = { workspace = true }
6 changes: 3 additions & 3 deletions crates/models/gpt2/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ use std::path::Path;

use ggml::Tensor;
use llm_base::{
model::common, util, EvaluateOutputRequest, FileType, InferenceParameters, InferenceSession,
InferenceSessionParameters, InferenceWithPromptParameters, KnownModel, LoadError, LoadProgress,
ModelParameters, TokenId, Vocabulary,
ggml, model::common, util, EvaluateOutputRequest, FileType, InferenceParameters,
InferenceSession, InferenceSessionParameters, InferenceWithPromptParameters, KnownModel,
LoadError, LoadProgress, ModelParameters, TokenId, Vocabulary,
};

/// The GPT-2 model.
Expand Down
8 changes: 5 additions & 3 deletions crates/models/gptj/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
[package]
name = "llm-gptj"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of GPT-J for the `llm` ecosystem."
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
llm-base = { path = "../../llm-base" }
ggml = { path = "../../ggml" }
llm-base = { path = "../../llm-base", version = "0.0.1" }

bytemuck = { workspace = true }
7 changes: 4 additions & 3 deletions crates/models/gptj/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ use std::{error::Error, path::Path};

use ggml::Tensor;
use llm_base::{
model::common, util, BasicWriteError, EvaluateOutputRequest, FileType, InferenceParameters,
InferenceSession, InferenceSessionParameters, InferenceWithPromptParameters, KnownModel,
LoadError, LoadProgress, Mmap, ModelParameters, TensorLoader, TokenId, Vocabulary,
ggml, model::common, util, BasicWriteError, EvaluateOutputRequest, FileType,
InferenceParameters, InferenceSession, InferenceSessionParameters,
InferenceWithPromptParameters, KnownModel, LoadError, LoadProgress, Mmap, ModelParameters,
TensorLoader, TokenId, Vocabulary,
};

pub struct GptJ {
Expand Down
8 changes: 5 additions & 3 deletions crates/models/llama/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
[package]
name = "llm-llama"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of LLaMA (Large Language Model Meta AI) for the `llm` ecosystem."
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
llm-base = { path = "../../llm-base" }
ggml = { path = "../../ggml" }
llm-base = { path = "../../llm-base", version = "0.0.1" }

bytemuck = { workspace = true }
rand = { workspace = true }
Expand Down
8 changes: 5 additions & 3 deletions crates/models/neox/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
[package]
name = "llm-neox"
version = { workspace = true }
version = "0.0.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of GPT-NeoX for the `llm` ecosystem."
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
llm-base = { path = "../../llm-base" }
ggml = { path = "../../ggml" }
llm-base = { path = "../../llm-base", version = "0.0.1" }

bytemuck = { workspace = true }
7 changes: 4 additions & 3 deletions crates/models/neox/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ use std::{error::Error, path::Path};

use ggml::Tensor;
use llm_base::{
model::common, util, BasicWriteError, EvaluateOutputRequest, FileType, InferenceParameters,
InferenceSession, InferenceSessionParameters, InferenceWithPromptParameters, KnownModel,
LoadError, LoadProgress, Mmap, ModelParameters, TensorLoader, TokenId, Vocabulary,
ggml, model::common, util, BasicWriteError, EvaluateOutputRequest, FileType,
InferenceParameters, InferenceSession, InferenceSessionParameters,
InferenceWithPromptParameters, KnownModel, LoadError, LoadProgress, Mmap, ModelParameters,
TensorLoader, TokenId, Vocabulary,
};

pub struct NeoX {
Expand Down

0 comments on commit 0fb17b9

Please sign in to comment.