changed paths to reference llama.cpp
LLukas22 committed Jun 9, 2023
1 parent 85d6468 commit 859c9a8
Showing 6 changed files with 155 additions and 18 deletions.
6 changes: 3 additions & 3 deletions .gitmodules
@@ -1,3 +1,3 @@
[submodule "crates/ggml/sys/ggml"]
path = crates/ggml/sys/ggml
url = https://github.com/ggerganov/ggml.git
[submodule "crates/ggml/sys/llama-cpp"]
path = crates/ggml/sys/llama-cpp
url = https://github.com/ggerganov/llama.cpp
2 changes: 1 addition & 1 deletion binaries/generate-ggml-bindings/src/main.rs
@@ -6,7 +6,7 @@ use std::path::PathBuf;

fn main() {
let bindings = bindgen::Builder::default()
.header("crates/ggml/sys/ggml/include/ggml/ggml.h")
.header("crates/ggml/sys/llama-cpp/ggml.h")
// Suppress some warnings
.raw_line("#![allow(non_upper_case_globals)]")
.raw_line("#![allow(non_camel_case_types)]")
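For orientation, here is a minimal sketch of how a bindgen-based generator along these lines typically finishes: generate the bindings and write them to a file. The output path and the expect messages are assumptions for illustration, not taken from this repository.

use std::path::PathBuf;

fn main() {
    let bindings = bindgen::Builder::default()
        // The header now lives at the root of the llama.cpp submodule.
        .header("crates/ggml/sys/llama-cpp/ggml.h")
        // Suppress some warnings in the generated code.
        .raw_line("#![allow(non_upper_case_globals)]")
        .raw_line("#![allow(non_camel_case_types)]")
        // Parse the header and emit Rust declarations.
        .generate()
        .expect("unable to generate bindings");

    // Assumed destination: the checked-in bindings module of the sys crate.
    let out_path = PathBuf::from("crates/ggml/sys/src/lib.rs");
    bindings
        .write_to_file(&out_path)
        .expect("couldn't write bindings");
}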
4 changes: 2 additions & 2 deletions crates/ggml/sys/build.rs
@@ -9,8 +9,8 @@ fn main() {
let mut builder = cc::Build::new();

let build = builder
.files(["ggml/src/ggml.c"])
.includes(["ggml/include/ggml"]);
.files(["llama-cpp/ggml.c"])
.includes(["llama-cpp"]);

// This is a very basic heuristic for applying compile flags.
// Feel free to update this to fit your operating system.
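As context for the path change, a hedged sketch of what a cc-based build script like this might look like end to end; the per-architecture flags and the library name are illustrative assumptions, not the repository's actual heuristic.

fn main() {
    let mut builder = cc::Build::new();

    let build = builder
        // ggml.c sits at the root of the llama.cpp checkout.
        .files(["llama-cpp/ggml.c"])
        .includes(["llama-cpp"]);

    // Very basic compile-flag heuristic, in the spirit of the comment above;
    // the target architecture comes from Cargo's build-script environment.
    let target_arch = std::env::var("CARGO_CFG_TARGET_ARCH").unwrap_or_default();
    if target_arch == "x86_64" {
        build.flag_if_supported("-mavx2");
        build.flag_if_supported("-mfma");
    } else if target_arch == "aarch64" {
        build.flag_if_supported("-mcpu=native");
    }

    // Compile and link the static library into the sys crate.
    build.compile("ggml");
}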
1 change: 0 additions & 1 deletion crates/ggml/sys/ggml
Submodule ggml deleted from db5eef
1 change: 1 addition & 0 deletions crates/ggml/sys/llama-cpp
Submodule llama-cpp added at ae9663
159 changes: 148 additions & 11 deletions crates/ggml/sys/src/lib.rs
@@ -42,14 +42,20 @@ pub const ggml_type_GGML_TYPE_Q5_0: ggml_type = 6;
pub const ggml_type_GGML_TYPE_Q5_1: ggml_type = 7;
pub const ggml_type_GGML_TYPE_Q8_0: ggml_type = 8;
pub const ggml_type_GGML_TYPE_Q8_1: ggml_type = 9;
pub const ggml_type_GGML_TYPE_I8: ggml_type = 10;
pub const ggml_type_GGML_TYPE_I16: ggml_type = 11;
pub const ggml_type_GGML_TYPE_I32: ggml_type = 12;
pub const ggml_type_GGML_TYPE_COUNT: ggml_type = 13;
pub const ggml_type_GGML_TYPE_Q2_K: ggml_type = 10;
pub const ggml_type_GGML_TYPE_Q3_K: ggml_type = 11;
pub const ggml_type_GGML_TYPE_Q4_K: ggml_type = 12;
pub const ggml_type_GGML_TYPE_Q5_K: ggml_type = 13;
pub const ggml_type_GGML_TYPE_Q6_K: ggml_type = 14;
pub const ggml_type_GGML_TYPE_Q8_K: ggml_type = 15;
pub const ggml_type_GGML_TYPE_I8: ggml_type = 16;
pub const ggml_type_GGML_TYPE_I16: ggml_type = 17;
pub const ggml_type_GGML_TYPE_I32: ggml_type = 18;
pub const ggml_type_GGML_TYPE_COUNT: ggml_type = 19;
pub type ggml_type = ::std::os::raw::c_uint;
pub const ggml_backend_GGML_BACKEND_CPU: ggml_backend = 0;
pub const ggml_backend_GGML_BACKEND_CUDA: ggml_backend = 1;
pub const ggml_backend_GGML_BACKEND_CL: ggml_backend = 2;
pub const ggml_backend_GGML_BACKEND_GPU: ggml_backend = 10;
pub const ggml_backend_GGML_BACKEND_GPU_SPLIT: ggml_backend = 20;
pub type ggml_backend = ::std::os::raw::c_uint;
pub const ggml_ftype_GGML_FTYPE_UNKNOWN: ggml_ftype = -1;
pub const ggml_ftype_GGML_FTYPE_ALL_F32: ggml_ftype = 0;
@@ -60,6 +66,11 @@ pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: ggml_ftype = 4;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q8_0: ggml_ftype = 7;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q5_0: ggml_ftype = 8;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q5_1: ggml_ftype = 9;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q2_K: ggml_ftype = 10;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q3_K: ggml_ftype = 11;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q4_K: ggml_ftype = 12;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q5_K: ggml_ftype = 13;
pub const ggml_ftype_GGML_FTYPE_MOSTLY_Q6_K: ggml_ftype = 14;
pub type ggml_ftype = ::std::os::raw::c_int;
pub const ggml_op_GGML_OP_NONE: ggml_op = 0;
pub const ggml_op_GGML_OP_DUP: ggml_op = 1;
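The regenerated constants above expose the new k-quant formats (Q2_K through Q8_K) alongside the existing types. Below is a small hedged sketch of how downstream code might branch on them; the helper is illustrative only and assumes the generated constants are in scope (for example via a glob import of the sys crate):

// Illustrative helper over the regenerated constants; not part of the bindings.
fn is_k_quant(t: ggml_type) -> bool {
    [
        ggml_type_GGML_TYPE_Q2_K,
        ggml_type_GGML_TYPE_Q3_K,
        ggml_type_GGML_TYPE_Q4_K,
        ggml_type_GGML_TYPE_Q5_K,
        ggml_type_GGML_TYPE_Q6_K,
        ggml_type_GGML_TYPE_Q8_K,
    ]
    .contains(&t)
}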
@@ -198,7 +209,8 @@ pub struct ggml_tensor {
pub perf_time_us: i64,
pub data: *mut ::std::os::raw::c_void,
pub name: [::std::os::raw::c_char; 32usize],
pub padding: [::std::os::raw::c_char; 16usize],
pub extra: *mut ::std::os::raw::c_void,
pub padding: [::std::os::raw::c_char; 4usize],
}
#[test]
fn bindgen_test_layout_ggml_tensor() {
@@ -385,8 +397,18 @@ fn bindgen_test_layout_ggml_tensor() {
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).padding) as usize - ptr as usize },
unsafe { ::std::ptr::addr_of!((*ptr).extra) as usize - ptr as usize },
208usize,
concat!(
"Offset of field: ",
stringify!(ggml_tensor),
"::",
stringify!(extra)
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).padding) as usize - ptr as usize },
216usize,
concat!(
"Offset of field: ",
stringify!(ggml_tensor),
@@ -640,6 +662,84 @@ fn bindgen_test_layout_ggml_init_params() {
)
);
}
pub const ggml_task_type_GGML_TASK_INIT: ggml_task_type = 0;
pub const ggml_task_type_GGML_TASK_COMPUTE: ggml_task_type = 1;
pub const ggml_task_type_GGML_TASK_FINALIZE: ggml_task_type = 2;
pub type ggml_task_type = ::std::os::raw::c_int;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ggml_compute_params {
pub type_: ggml_task_type,
pub ith: ::std::os::raw::c_int,
pub nth: ::std::os::raw::c_int,
pub wsize: usize,
pub wdata: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout_ggml_compute_params() {
const UNINIT: ::std::mem::MaybeUninit<ggml_compute_params> = ::std::mem::MaybeUninit::uninit();
let ptr = UNINIT.as_ptr();
assert_eq!(
::std::mem::size_of::<ggml_compute_params>(),
32usize,
concat!("Size of: ", stringify!(ggml_compute_params))
);
assert_eq!(
::std::mem::align_of::<ggml_compute_params>(),
8usize,
concat!("Alignment of ", stringify!(ggml_compute_params))
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).type_) as usize - ptr as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(ggml_compute_params),
"::",
stringify!(type_)
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).ith) as usize - ptr as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(ggml_compute_params),
"::",
stringify!(ith)
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).nth) as usize - ptr as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(ggml_compute_params),
"::",
stringify!(nth)
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).wsize) as usize - ptr as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(ggml_compute_params),
"::",
stringify!(wsize)
)
);
assert_eq!(
unsafe { ::std::ptr::addr_of!((*ptr).wdata) as usize - ptr as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(ggml_compute_params),
"::",
stringify!(wdata)
)
);
}
extern "C" {
pub fn ggml_time_init();
}
@@ -664,9 +764,18 @@ extern "C" {
extern "C" {
pub fn ggml_nelements(tensor: *const ggml_tensor) -> i64;
}
extern "C" {
pub fn ggml_nrows(tensor: *const ggml_tensor) -> i64;
}
extern "C" {
pub fn ggml_nbytes(tensor: *const ggml_tensor) -> usize;
}
extern "C" {
pub fn ggml_nbytes_split(
tensor: *const ggml_tensor,
nrows_split: ::std::os::raw::c_int,
) -> usize;
}
extern "C" {
pub fn ggml_blck_size(type_: ggml_type) -> ::std::os::raw::c_int;
}
@@ -691,6 +800,12 @@ extern "C" {
extern "C" {
pub fn ggml_ftype_to_ggml_type(ftype: ggml_ftype) -> ggml_type;
}
extern "C" {
pub fn ggml_is_transposed(tensor: *const ggml_tensor) -> bool;
}
extern "C" {
pub fn ggml_is_contiguous(tensor: *const ggml_tensor) -> bool;
}
extern "C" {
pub fn ggml_tensor_overhead() -> usize;
}
@@ -709,6 +824,12 @@ extern "C" {
extern "C" {
pub fn ggml_set_no_alloc(ctx: *mut ggml_context, no_alloc: bool);
}
extern "C" {
pub fn ggml_get_mem_buffer(ctx: *mut ggml_context) -> *mut ::std::os::raw::c_void;
}
extern "C" {
pub fn ggml_get_mem_size(ctx: *mut ggml_context) -> usize;
}
extern "C" {
pub fn ggml_new_tensor(
ctx: *mut ggml_context,
@@ -763,6 +884,12 @@ extern "C" {
extern "C" {
pub fn ggml_view_tensor(ctx: *mut ggml_context, src: *const ggml_tensor) -> *mut ggml_tensor;
}
extern "C" {
pub fn ggml_get_tensor(
ctx: *mut ggml_context,
name: *const ::std::os::raw::c_char,
) -> *mut ggml_tensor;
}
extern "C" {
pub fn ggml_set_zero(tensor: *mut ggml_tensor) -> *mut ggml_tensor;
}
@@ -1295,11 +1422,21 @@ extern "C" {
pub fn ggml_graph_reset(cgraph: *mut ggml_cgraph);
}
extern "C" {
pub fn ggml_get_tensor_by_name(
pub fn ggml_graph_get_tensor(
cgraph: *mut ggml_cgraph,
name: *const ::std::os::raw::c_char,
) -> *mut ggml_tensor;
}
extern "C" {
pub fn ggml_graph_export(cgraph: *const ggml_cgraph, fname: *const ::std::os::raw::c_char);
}
extern "C" {
pub fn ggml_graph_import(
fname: *const ::std::os::raw::c_char,
ctx_data: *mut *mut ggml_context,
ctx_eval: *mut *mut ggml_context,
) -> ggml_cgraph;
}
extern "C" {
pub fn ggml_graph_print(cgraph: *const ggml_cgraph);
}
@@ -1312,12 +1449,12 @@ extern "C" {
}
pub const ggml_opt_type_GGML_OPT_ADAM: ggml_opt_type = 0;
pub const ggml_opt_type_GGML_OPT_LBFGS: ggml_opt_type = 1;
pub type ggml_opt_type = ::std::os::raw::c_uint;
pub type ggml_opt_type = ::std::os::raw::c_int;
pub const ggml_linesearch_GGML_LINESEARCH_DEFAULT: ggml_linesearch = 1;
pub const ggml_linesearch_GGML_LINESEARCH_BACKTRACKING_ARMIJO: ggml_linesearch = 0;
pub const ggml_linesearch_GGML_LINESEARCH_BACKTRACKING_WOLFE: ggml_linesearch = 1;
pub const ggml_linesearch_GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE: ggml_linesearch = 2;
pub type ggml_linesearch = ::std::os::raw::c_uint;
pub type ggml_linesearch = ::std::os::raw::c_int;
pub const ggml_opt_result_GGML_OPT_OK: ggml_opt_result = 0;
pub const ggml_opt_result_GGML_OPT_DID_NOT_CONVERGE: ggml_opt_result = 1;
pub const ggml_opt_result_GGML_OPT_NO_CONTEXT: ggml_opt_result = 2;
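One practical consequence of the rename in this diff: callers that previously used ggml_get_tensor_by_name now go through ggml_graph_get_tensor. Here is a hedged sketch of a thin wrapper, assuming the generated items are in scope (crate layout assumed for illustration):

use std::ffi::CStr;

/// Illustrative wrapper around the renamed binding; returns None when the
/// graph contains no tensor with the given name.
///
/// Safety: `cgraph` must point to a valid, initialized ggml computation graph.
unsafe fn graph_tensor_by_name(
    cgraph: *mut ggml_cgraph,
    name: &CStr,
) -> Option<*mut ggml_tensor> {
    let tensor = unsafe { ggml_graph_get_tensor(cgraph, name.as_ptr()) };
    if tensor.is_null() {
        None
    } else {
        Some(tensor)
    }
}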
