Skip to content

Commit e459118

Browse files
authored
Merge branch 'master' into fix/stream_tokens_usage
2 parents 8e8e05d + cbedf2f commit e459118

File tree

2 files changed: +1 addition, −7 deletions

Makefile

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -745,10 +745,6 @@ backend-assets/grpc/llama-cpp-fallback: backend-assets/grpc backend/cpp/llama/ll
 	$(info ${GREEN}I llama-cpp build info:fallback${RESET})
 	CMAKE_ARGS="$(CMAKE_ARGS) -DGGML_AVX=off -DGGML_AVX2=off -DGGML_AVX512=off -DGGML_FMA=off -DGGML_F16C=off" $(MAKE) VARIANT="llama-fallback" build-llama-cpp-grpc-server
 	cp -rfv backend/cpp/llama-fallback/grpc-server backend-assets/grpc/llama-cpp-fallback
-# TODO: every binary should have its own folder instead, so can have different metal implementations
-ifeq ($(BUILD_TYPE),metal)
-	cp backend/cpp/llama-fallback/llama.cpp/build/bin/ggml-metal.metal backend-assets/grpc/
-endif
 
 backend-assets/grpc/llama-cpp-cuda: backend-assets/grpc backend/cpp/llama/llama.cpp
 	cp -rf backend/cpp/llama backend/cpp/llama-cuda

backend/cpp/llama/Makefile

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,7 @@ else ifeq ($(OS),Darwin)
 CMAKE_ARGS+=-DGGML_METAL=OFF
 else
 CMAKE_ARGS+=-DGGML_METAL=ON
-# Until this is tested properly, we disable embedded metal file
-# as we already embed it as part of the LocalAI assets
-CMAKE_ARGS+=-DGGML_METAL_EMBED_LIBRARY=OFF
+CMAKE_ARGS+=-DGGML_METAL_EMBED_LIBRARY=ON
 TARGET+=--target ggml-metal
 endif
 endif

0 commit comments

Comments
 (0)