-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathMakefile
More file actions
109 lines (89 loc) · 2.35 KB
/
Makefile
File metadata and controls
109 lines (89 loc) · 2.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# Directory where the `llama.cpp` sources are cloned.
# `:=` (simple expansion) so $(abspath)/$(shell) run once at parse time,
# not on every reference.
LLAMA_DIR := $(abspath ./llama.cpp)
# Directory the built `llama-server` binary and its libraries are copied to.
SERVER_BIN_PATH := $(abspath ./llama-server/)

# Host platform / architecture (used to pick per-OS copy behavior).
OS := $(shell uname -s)
ARCH := $(shell uname -m)

# Final Go binary produced by `make build`.
ONEINFER_BIN := ./oneinfer

# Base CMake options: out-of-tree build dir inside llama.cpp, Release mode.
# NOTE: the build dir is fixed here via -B; rules should not pass -B again.
CMAKE_OPTS := -B$(LLAMA_DIR)/build -DCMAKE_BUILD_TYPE=Release

# Optional accelerator backends — enable from the command line,
# e.g. `make USE_CUDA=1`. Each one maps to the matching GGML CMake switch.
ifdef USE_BLAS
CMAKE_OPTS += -DGGML_BLAS=ON
endif
ifdef USE_CUDA
CMAKE_OPTS += -DGGML_CUDA=ON
endif
ifdef USE_MUSA
CMAKE_OPTS += -DGGML_MUSA=ON
endif
ifdef USE_HIP
CMAKE_OPTS += -DGGML_HIP=ON
endif
ifdef USE_CANN
CMAKE_OPTS += -DGGML_CANN=ON
endif
ifdef USE_VULKAN
CMAKE_OPTS += -DGGML_VULKAN=ON
endif
ifdef USE_METAL
CMAKE_OPTS += -DGGML_METAL=ON
endif
ifdef USE_SYCL
CMAKE_OPTS += -DGGML_SYCL=ON
endif
# Default goal: build oneinfer (and everything it needs).
# `run` added to .PHONY — it is a command, not a file, and a file named
# `run` would otherwise silently disable it.
.PHONY: all clean build run llama copy_libs
all: build
# 1. 克隆 llama.cpp(如果不存在)
$(LLAMA_DIR):
@if [ ! -d "$(LLAMA_DIR)" ]; then \
echo "Cloning llama.cpp..."; \
git clone https://github.com/ggerganov/llama.cpp $(LLAMA_DIR); \
else \
echo "llama.cpp already exists."; \
fi
# 2. Build `llama-server` from llama.cpp with CMake.
#
# Fixes over the previous version:
#  - the old recipe tested the undefined $(SERVER_BIN), so the "already
#    compiled" branch never triggered; test the copied binary path instead.
#  - `@if` appeared in the middle of a backslash-continued shell command;
#    `@` is only a Make echo-suppression prefix at the start of a recipe
#    line, so it was passed to the shell verbatim → syntax error.
#  - the `Windows_NT` branch was dead code (`uname -s` never returns it)
#    and `==` inside `[ ]` is not POSIX sh. A single
#    `cmake` + `cmake --build` pair works on every platform.
#  - the build dir comes from -B in $(CMAKE_OPTS); no second -B is passed.
llama: $(LLAMA_DIR)
	@if [ ! -f "$(SERVER_BIN_PATH)/llama-server" ]; then \
		echo "Compiling llama server with CMake..."; \
		cmake -S $(LLAMA_DIR) $(CMAKE_OPTS) && \
		cmake --build $(LLAMA_DIR)/build --config Release --target llama-server && \
		mkdir -p $(SERVER_BIN_PATH) && \
		cp $(LLAMA_DIR)/build/bin/llama-server $(SERVER_BIN_PATH); \
	else \
		echo "Llama server already compiled."; \
	fi
# 3. Copy the shared libraries produced by the llama.cpp build into
#    $(SERVER_BIN_PATH) so llama-server can load them at runtime.
#
# Fixes over the previous version:
#  - the inner `fi` had no `;` before `else`; with the backslash line
#    continuation the shell saw `fi else`, a syntax error.
#  - `OS=$(shell uname)` expanded a parse-time Make function inside a
#    runtime shell assignment; the Makefile already computes $(OS) once
#    at the top, so use that directly.
#  - on Darwin there are no `.so` files, so everything in build/bin is
#    copied; elsewhere a static build may produce no `.so` at all —
#    `|| true` keeps a no-match glob from failing the whole build.
copy_libs:
	@echo "Copying shared libraries..."
	@if [ -d "$(LLAMA_DIR)/build/bin" ]; then \
		mkdir -p $(SERVER_BIN_PATH); \
		if [ "$(OS)" = "Darwin" ]; then \
			cp $(LLAMA_DIR)/build/bin/* $(SERVER_BIN_PATH); \
		else \
			cp $(LLAMA_DIR)/build/bin/*.so $(SERVER_BIN_PATH) 2>/dev/null || true; \
		fi; \
	else \
		echo "No shared libraries found."; \
	fi
# 4. Build the Go project (needs the llama server binary and its libs).
#
# Fixes over the previous version: the recipe ended with a dangling `; \`
# that continued the command into the following comment line, and `;`
# chaining let a failed `go build` hide behind the echo's exit status.
# Separate recipe lines let Make stop on the first failure.
build: llama copy_libs
	@echo "Building oneinfer..."
	go build -o $(ONEINFER_BIN) ./main.go
# 5. Build (if anything is out of date) and launch the oneinfer binary.
#    $(ONEINFER_BIN) already carries the `./` prefix, so it is directly
#    executable as-is.
run: build
	$(ONEINFER_BIN)
# 6. Remove everything the build produced: the CMake build tree, the
#    copied server binary directory, and the Go binary.
#
# Fixes over the previous version: $(SERVER_BIN) was never defined and
# expanded to an empty word (a latent hazard next to `rm -rf`), and the
# literal `llama-server` duplicated what $(SERVER_BIN_PATH) already names.
clean:
	rm -rf $(LLAMA_DIR)/build $(SERVER_BIN_PATH) $(ONEINFER_BIN)