mpi : trying to move more MPI stuff into ggml-mpi (WIP) (#2099)
commit 3232db628c (parent ef61acfbf5)
11 changed files with 134 additions and 67 deletions
llama.h (4 changed lines)
@@ -158,9 +158,9 @@ extern "C" {
     // Initialize the llama + ggml backend
     // If numa is true, use NUMA optimizations
     // Call once at the start of the program
-    LLAMA_API void llama_init_backend(bool numa);
+    LLAMA_API void llama_backend_init(bool numa);

     // Call once at the end of the program - currently only used for MPI
-    LLAMA_API void llama_finalize_backend();
+    LLAMA_API void llama_backend_free();

     LLAMA_API int64_t llama_time_us();
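For callers, this hunk is a pure rename of the backend lifecycle functions. Below is a minimal sketch of a program driving the new names; model loading and inference are omitted, and only the declarations visible in this hunk are used:

    #include "llama.h"
    #include <cstdio>

    int main() {
        // Call once at the start of the program; pass true to enable NUMA optimizations.
        llama_backend_init(false);

        // ... load a model and run inference here ...
        const int64_t t_start_us = llama_time_us();
        printf("backend initialized, timestamp = %lld us\n", (long long) t_start_us);

        // Call once at the end of the program; per the header comment,
        // this is currently only needed for MPI builds.
        llama_backend_free();
        return 0;
    }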