mudler/LocalAI (https://github.com/mudler/LocalAI.git)
commit 138800117e (parent 390ed6a09b)

create a separate target

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
.github/workflows/release.yaml: 2 changed lines (vendored)

@@ -88,7 +88,7 @@ jobs:
           go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.0
           export PATH=$PATH:$GOPATH/bin
           export PATH=/usr/local/cuda/bin:$PATH
-          GO_TAGS=p2p GOOS=linux GOARCH=arm64 CMAKE_ARGS="-DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++" DIST_SKIP_HIPBLAS=true make dist
+          GO_TAGS=p2p GOOS=linux GOARCH=arm64 CMAKE_ARGS="-DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++" make dist-aarch64
       - uses: actions/upload-artifact@v4
         with:
           name: LocalAI-linux-aarch64
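For reference, the updated CI step can be reproduced locally along these lines (a sketch only, assuming the aarch64-linux-gnu cross compilers and a CUDA toolkit under /usr/local/cuda are installed, as on the release runner):

    # toolchain setup, as in the workflow step above
    go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.0
    export PATH=$PATH:$GOPATH/bin
    export PATH=/usr/local/cuda/bin:$PATH

    # cross-compile the arm64 distribution bundle via the new Makefile target
    GO_TAGS=p2p GOOS=linux GOARCH=arm64 \
      CMAKE_ARGS="-DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++" \
      make dist-aarch64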
Makefile: 15 changed lines

@@ -324,13 +324,24 @@ build-api:
 dist:
     STATIC=true $(MAKE) backend-assets/grpc/llama-cpp-avx2
 ifeq ($(OS),Darwin)
-    $(info ${GREEN}I Skip CUDA build on MacOS${RESET})
+    $(info ${GREEN}I Skip CUDA/hipblas build on MacOS${RESET})
 else
     $(MAKE) backend-assets/grpc/llama-cpp-cuda
 ifneq ($(DIST_SKIP_HIPBLAS),true)
     $(MAKE) backend-assets/grpc/llama-cpp-hipblas
 endif
 endif
     $(MAKE) build
     mkdir -p release
 # if BUILD_ID is empty, then we don't append it to the binary name
 ifeq ($(BUILD_ID),)
     cp $(BINARY_NAME) release/$(BINARY_NAME)-$(OS)-$(ARCH)
     shasum -a 256 release/$(BINARY_NAME)-$(OS)-$(ARCH) > release/$(BINARY_NAME)-$(OS)-$(ARCH).sha256
 else
     cp $(BINARY_NAME) release/$(BINARY_NAME)-$(BUILD_ID)-$(OS)-$(ARCH)
     shasum -a 256 release/$(BINARY_NAME)-$(BUILD_ID)-$(OS)-$(ARCH) > release/$(BINARY_NAME)-$(BUILD_ID)-$(OS)-$(ARCH).sha256
 endif
+
+dist-aarch64:
+    CMAKE_ARGS="$(CMAKE_ARGS) -DLLAMA_NATIVE=off" GRPC_BACKENDS="backend-assets/grpc/llama-cpp-fallback backend-assets/grpc/llama-cpp-cuda backend-assets/grpc/huggingface backend-assets/grpc/bert-embeddings backend-assets/grpc/llama-ggml backend-assets/grpc/llama-cpp-grpc backend-assets/util/llama-cpp-rpc-server backend-assets/grpc/gpt4all backend-assets/grpc/rwkv backend-assets/grpc/whisper backend-assets/grpc/local-store" \
+    $(MAKE) build
+    mkdir -p release
+    # if BUILD_ID is empty, then we don't append it to the binary name

[remainder of the Makefile diff truncated in the original view]
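The dist recipe above copies the freshly built binary into release/ and writes a SHA-256 checksum next to it, while the new dist-aarch64 target overrides GRPC_BACKENDS so that only the listed backends are cross-built (no hipblas in the list, so the DIST_SKIP_HIPBLAS=true override dropped from the workflow above is no longer needed). A usage sketch for checking the produced artifacts, assuming BINARY_NAME resolves to local-ai, BUILD_ID is unset, and the host reports Linux/x86_64 (names will differ otherwise):

    make dist
    ls release/
    # local-ai-Linux-x86_64
    # local-ai-Linux-x86_64.sha256

    # the .sha256 file records the release/-prefixed path, so verify from the repo root
    shasum -a 256 -c release/local-ai-Linux-x86_64.sha256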