llama-bench : add support for the RPC backend (#7435)
parent 87bdf2a199
commit 210d99173d
3 changed files with 35 additions and 2 deletions
ggml.h | 1 +
@@ -2428,6 +2428,7 @@ extern "C" {
     GGML_API int ggml_cpu_has_sse3       (void);
     GGML_API int ggml_cpu_has_ssse3      (void);
     GGML_API int ggml_cpu_has_sycl       (void);
+    GGML_API int ggml_cpu_has_rpc        (void);
     GGML_API int ggml_cpu_has_vsx        (void);
     GGML_API int ggml_cpu_has_matmul_int8(void);
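The matching definition belongs in ggml.c, which this commit also touches. A minimal sketch of what it presumably looks like, assuming the RPC backend is gated behind a GGML_USE_RPC compile-time flag in the same way the other ggml_cpu_has_* queries are gated behind their backend flags:

// Hypothetical sketch of the ggml.c definition, not the verbatim commit contents.
// Assumption: GGML_USE_RPC is defined when the RPC backend is compiled in,
// mirroring the pattern of the other ggml_cpu_has_* capability functions.
int ggml_cpu_has_rpc(void) {
#if defined(GGML_USE_RPC)
    return 1;
#else
    return 0;
#endif
}

With this query in place, callers such as llama-bench can report whether the binary was built with RPC support alongside the other backend capability flags.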