ggerganov committed
Commit efcca56
1 Parent(s): 6e12dfd

ggml : add and use ggml_cpu_has_llamafile() (llama/8664)

Files changed (2)
  1. ggml/include/ggml.h +1 -0
  2. ggml/src/ggml.c +8 -0
ggml/include/ggml.h CHANGED
@@ -2400,6 +2400,7 @@ extern "C" {
     GGML_API int ggml_cpu_has_vsx        (void);
     GGML_API int ggml_cpu_has_matmul_int8(void);
     GGML_API int ggml_cpu_has_cann       (void);
+    GGML_API int ggml_cpu_has_llamafile  (void);
 
     //
     // Internal types and functions exposed for tests and benchmarks
ggml/src/ggml.c CHANGED
@@ -22004,6 +22004,14 @@ int ggml_cpu_has_cann(void) {
 #endif
 }
 
+int ggml_cpu_has_llamafile(void) {
+#if defined(GGML_USE_LLAMAFILE)
+    return 1;
+#else
+    return 0;
+#endif
+}
+
 int ggml_cpu_has_gpublas(void) {
     return ggml_cpu_has_cuda() || ggml_cpu_has_vulkan() || ggml_cpu_has_kompute() || ggml_cpu_has_sycl();
 }
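
The commit title notes the new flag is also used downstream (llama/8664), but that usage is not part of the hunks above. A minimal sketch, not from this commit, of how a caller might query the new probe alongside the existing ggml_cpu_has_* flags visible in the diff context:

/* Usage sketch (assumption, not part of this diff):
   ggml_cpu_has_llamafile() returns 1 only when ggml was built with
   GGML_USE_LLAMAFILE, mirroring the other compile-time feature probes. */
#include <stdio.h>
#include "ggml.h"

int main(void) {
    printf("LLAMAFILE   = %d\n", ggml_cpu_has_llamafile());
    printf("MATMUL_INT8 = %d\n", ggml_cpu_has_matmul_int8());
    printf("CANN        = %d\n", ggml_cpu_has_cann());
    printf("GPUBLAS     = %d\n", ggml_cpu_has_gpublas());
    return 0;
}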