File tree — 2 files changed: +1 −7 lines changed
@@ -745,10 +745,6 @@ backend-assets/grpc/llama-cpp-fallback: backend-assets/grpc backend/cpp/llama/llama.cpp
 	$(info ${GREEN}I llama-cpp build info:fallback${RESET})
 	CMAKE_ARGS="$(CMAKE_ARGS) -DGGML_AVX=off -DGGML_AVX2=off -DGGML_AVX512=off -DGGML_FMA=off -DGGML_F16C=off" $(MAKE) VARIANT="llama-fallback" build-llama-cpp-grpc-server
 	cp -rfv backend/cpp/llama-fallback/grpc-server backend-assets/grpc/llama-cpp-fallback
-# TODO: every binary should have its own folder instead, so can have different metal implementations
-ifeq ($(BUILD_TYPE),metal)
-	cp backend/cpp/llama-fallback/llama.cpp/build/bin/ggml-metal.metal backend-assets/grpc/
-endif
 
 backend-assets/grpc/llama-cpp-cuda: backend-assets/grpc backend/cpp/llama/llama.cpp
 	cp -rf backend/cpp/llama backend/cpp/llama-cuda
@@ -30,9 +30,7 @@ else ifeq ($(OS),Darwin)
 	CMAKE_ARGS+=-DGGML_METAL=OFF
 else
 	CMAKE_ARGS+=-DGGML_METAL=ON
-	# Until this is tested properly, we disable embedded metal file
-	# as we already embed it as part of the LocalAI assets
-	CMAKE_ARGS+=-DGGML_METAL_EMBED_LIBRARY=OFF
+	CMAKE_ARGS+=-DGGML_METAL_EMBED_LIBRARY=ON
 	TARGET+=--target ggml-metal
 endif
 endif
You can’t perform that action at this time.
0 commit comments