llama: use host memory if device reports 0 memory (llama/18587)

Authored by Aaron Teo on 2026-01-09 05:34:56 +08:00, committed by Georgi Gerganov
parent a71127dfd8
commit fff3ebd93d
2 changed files with 3 additions and 3 deletions


@@ -144,7 +144,7 @@ extern "C" {
     // device description: short informative description of the device, could be the model name
     const char * (*get_description)(ggml_backend_dev_t dev);
-    // device memory in bytes
+    // device memory in bytes: 0 bytes to indicate no memory to report
     void (*get_memory)(ggml_backend_dev_t dev, size_t * free, size_t * total);
     // device type


@@ -4287,8 +4287,8 @@ static const char * ggml_backend_opencl_device_get_description(ggml_backend_dev_
 }

 static void ggml_backend_opencl_device_get_memory(ggml_backend_dev_t dev, size_t * free, size_t * total) {
-    *free = 1;
-    *total = 1;
+    *free = 0;
+    *total = 0;
     GGML_UNUSED(dev);
 }
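
For context, a minimal caller-side sketch of the convention this change establishes: if a device reports 0 total bytes, treat it as having no memory to report and fall back to host (CPU) memory. This is an illustration only, not the actual llama.cpp fallback code; pick_buffer_type is a hypothetical helper, while ggml_backend_dev_memory, ggml_backend_dev_buffer_type, and ggml_backend_cpu_buffer_type are public functions from ggml-backend.h.

#include "ggml-backend.h"

// Hypothetical helper: choose a buffer type for allocating tensors on dev.
static ggml_backend_buffer_type_t pick_buffer_type(ggml_backend_dev_t dev) {
    size_t free_mem  = 0;
    size_t total_mem = 0;
    ggml_backend_dev_memory(dev, &free_mem, &total_mem);
    if (total_mem == 0) {
        // device reported no memory (e.g. the OpenCL backend above):
        // allocate from host memory instead
        return ggml_backend_cpu_buffer_type();
    }
    return ggml_backend_dev_buffer_type(dev);
}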