guix-mirrors/gnu/packages/patches/llama-cpp-vulkan-optional.patch
commit 0b1eea99c3
Author: Danny Milosavljevic
Date:   2025-02-08 14:03:12 +01:00

    gnu: llama-cpp: Prevent undefined behavior.

    * gnu/packages/patches/llama-cpp-vulkan-optional.patch: Modify.

    Change-Id: I58816f098a0da2b75cea5f90bda91bcf0bfe60d1

Author: Danny Milosavljevic <dannym@friendly-machines.com>
Date: 2025-01-29
License: Expat
Subject: Make Vulkan optional
See also: <https://github.com/ggerganov/llama.cpp/pull/11494>
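
The hunks below wrap Vulkan instance initialization in a try/catch so that,
on a machine without a usable Vulkan driver, backend registration returns
nullptr instead of handing out a registration whose instance was never
initialized.  A minimal sketch of that guard, with stand-in types for the
real ggml structures (only ggml_vk_instance_init, vk::SystemError and the
return-nullptr convention are taken from the patch; everything else is
illustrative):

    #include <vulkan/vulkan.hpp>   // provides vk::createInstance, vk::SystemError
    #include <cstdio>

    struct ggml_backend_reg { const char *name; };   // stand-in for the real ggml type
    static ggml_backend_reg reg = { "Vulkan" };      // mirrors the static `reg` in the hunk
    static vk::Instance vk_instance;

    static void ggml_vk_instance_init() {
        // Throws vk::SystemError when no Vulkan loader/ICD is usable.
        vk_instance = vk::createInstance(vk::InstanceCreateInfo{});
    }

    ggml_backend_reg *ggml_backend_vk_reg() {
        try {
            ggml_vk_instance_init();
            return &reg;            // Vulkan works: expose the backend
        } catch (const vk::SystemError &e) {
            std::fprintf(stderr, "Vulkan unavailable: %s\n", e.what());
            return nullptr;         // callers treat a null registration as "no such backend"
        }
    }
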
diff -ru orig/llama.cpp/ggml/include/ggml-vulkan.h llama.cpp/ggml/include/ggml-vulkan.h
--- orig/llama.cpp/ggml/include/ggml-vulkan.h 2025-01-29 10:24:10.894476682 +0100
+++ llama.cpp/ggml/include/ggml-vulkan.h 2025-02-07 18:28:34.509509638 +0100
@@ -10,8 +10,6 @@
 #define GGML_VK_NAME "Vulkan"
 #define GGML_VK_MAX_DEVICES 16
 
-GGML_BACKEND_API void ggml_vk_instance_init(void);
-
 // backend API
 GGML_BACKEND_API ggml_backend_t ggml_backend_vk_init(size_t dev_num);
 
diff -ru orig/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp
--- orig/llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp 2025-01-29 10:24:10.922476480 +0100
+++ llama.cpp/ggml/src/ggml-vulkan/ggml-vulkan.cpp 2025-01-29 22:33:19.955087552 +0100
@@ -8174,8 +8174,13 @@
         /* .iface = */ ggml_backend_vk_reg_i,
         /* .context = */ nullptr,
     };
-
-    return &reg;
+    try {
+        ggml_vk_instance_init();
+        return &reg;
+    } catch (const vk::SystemError& e) {
+        VK_LOG_DEBUG("ggml_vk_get_device_count() -> Error: System error: " << e.what());
+        return nullptr;
+    }
 }
 
 // Extension availability