From 56c5f988eb5bfc35b5b7b70964f81a17a45e5374 Mon Sep 17 00:00:00 2001
From: Feng Jiang
Date: Wed, 21 Aug 2024 16:44:44 +0800
Subject: [PATCH] ggml/kompute: Introduce ggml_backend_kompute_get_device_memory()

Signed-off-by: Feng Jiang
---
 ggml/include/ggml-kompute.h |  1 +
 ggml/src/ggml-kompute.cpp   | 13 +++++++++++++
 2 files changed, 14 insertions(+)

diff --git a/ggml/include/ggml-kompute.h b/ggml/include/ggml-kompute.h
index 1de2cb478..7d7623614 100644
--- a/ggml/include/ggml-kompute.h
+++ b/ggml/include/ggml-kompute.h
@@ -26,6 +26,7 @@ struct ggml_vk_device {
 struct ggml_vk_device * ggml_vk_available_devices(size_t memoryRequired, size_t * count);
 int ggml_backend_kompute_get_device_count(void);
+void ggml_backend_kompute_get_device_memory(int device, size_t * free, size_t * total);
 bool ggml_vk_get_device(struct ggml_vk_device * device, size_t memoryRequired, const char * name);
 bool ggml_vk_has_vulkan(void);
diff --git a/ggml/src/ggml-kompute.cpp b/ggml/src/ggml-kompute.cpp
index 21fe76648..dfecf0881 100644
--- a/ggml/src/ggml-kompute.cpp
+++ b/ggml/src/ggml-kompute.cpp
@@ -339,6 +339,19 @@ int ggml_backend_kompute_get_device_count(void) {
     return devices.size();
 }
 
+
+void ggml_backend_kompute_get_device_memory(int device, size_t * free, size_t * total) {
+    auto devices = ggml_vk_available_devices_internal(0);
+
+    for (std::size_t i = 0; i < devices.size(); i++) {
+        if (devices[i].index == device) {
+            *total = devices[i].heapSize;
+            *free = devices[i].heapSize;
+            break;
+        }
+    }
+}
+
 static void ggml_vk_filterByVendor(std::vector<ggml_vk_device>& devices, const std::string& targetVendor) {
     devices.erase(
         std::remove_if(devices.begin(), devices.end(),
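
For context, a minimal usage sketch of the new entry point (illustrative only, not part of the patch). It assumes the device indices returned by ggml_backend_kompute_get_device_count() correspond to the .index field checked inside the new function:

// Usage sketch (illustrative, not part of this patch): enumerate Kompute
// devices and print the memory reported by the new API.
#include <cstdio>
#include "ggml-kompute.h"

int main(void) {
    int n_devices = ggml_backend_kompute_get_device_count();
    for (int i = 0; i < n_devices; i++) {
        size_t free = 0, total = 0;
        ggml_backend_kompute_get_device_memory(i, &free, &total);
        // The patch assigns heapSize to both outputs, so free == total here.
        printf("kompute device %d: free=%zu total=%zu bytes\n", i, free, total);
    }
    return 0;
}

Note that the implementation reports the device's heapSize for both *free and *total; callers that need the actual amount of free memory should treat *free as an upper bound.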