ggml : add build-time message to remind about ggml_set_rows (#14661)

ggml-ci
Georgi Gerganov 2025-07-13 10:36:33 +03:00 committed by GitHub
parent dcf7f2ea3c
commit 05fec5bd29
4 changed files with 4 additions and 0 deletions
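
The added lines use the standard #pragma message directive, which GCC, Clang, and MSVC all honor by printing the quoted string as a compile-time diagnostic without changing the generated code, so each backend build now carries a visible reminder that GGML_OP_SET_ROWS still lacks support for the listed types. A minimal, self-contained sketch of the pattern follows; the file name and function below are illustrative placeholders, not code from this commit:

    // pragma_note_example.cpp -- illustrative sketch, not part of the commit
    #include <cstdio>

    static bool example_supports_op(bool is_f32) {
        // The compiler prints this string each time this translation unit is
        // compiled; it has no effect on the runtime behavior of the function.
        #pragma message("TODO: implement BF16, Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, IQ4_NL support (https://github.com/ggml-org/llama.cpp/pull/14661)")
        return is_f32; // placeholder check, analogous to the per-type checks in the hunks below
    }

    int main() {
        std::printf("supported: %d\n", example_supports_op(true));
        return 0;
    }

Compiling the sketch with, for example, g++ -c pragma_note_example.cpp prints the TODO as a note in the build log, which is the effect the commit relies on to keep the missing-type work visible.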


@@ -2090,6 +2090,7 @@ static bool ggml_backend_cann_supports_op(ggml_backend_dev_t dev,
            {
                // TODO: add support
                // ref: https://github.com/ggml-org/llama.cpp/pull/14274
+#pragma message("TODO: implement F32, F16, BF16, Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, IQ4_NL support (https://github.com/ggml-org/llama.cpp/pull/14661)")
                return false;
            } break;
        case GGML_OP_CPY: {


@@ -3222,6 +3222,7 @@ static bool ggml_backend_cuda_device_supports_op(ggml_backend_dev_t dev, const g
            } break;
        case GGML_OP_SET_ROWS:
            {
+#pragma message("TODO: implement BF16, Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, IQ4_NL support (https://github.com/ggml-org/llama.cpp/pull/14661)")
                return (op->type == GGML_TYPE_F32 || op->type == GGML_TYPE_F16) &&
                       op->src[0]->type == GGML_TYPE_F32 &&
                       op->src[1]->type == GGML_TYPE_I64;


@@ -2280,6 +2280,7 @@ static bool ggml_opencl_supports_op(ggml_backend_dev_t dev, const struct ggml_te
            {
                // TODO: add support
                // ref: https://github.com/ggml-org/llama.cpp/pull/14274
+#pragma message("TODO: implement BF16, Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, IQ4_NL support (https://github.com/ggml-org/llama.cpp/pull/14661)")
                if (op->src[0]->type != GGML_TYPE_F32) {
                    return false;
                }


@@ -4303,6 +4303,7 @@ static bool ggml_backend_sycl_device_supports_op(ggml_backend_dev_t dev, const g
            {
                // TODO: add support
                // ref: https://github.com/ggml-org/llama.cpp/pull/14274
+#pragma message("TODO: implement BF16, Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, IQ4_NL support (https://github.com/ggml-org/llama.cpp/pull/14661)")
                return (op->type == GGML_TYPE_F32 || (op->type == GGML_TYPE_F16 && op->src[0]->type == GGML_TYPE_F32 && op->src[1]->type == GGML_TYPE_I64));
            } break;
        case GGML_OP_CPY: