Skip to content

Commit dc3805e

Browse files
authored
[BUG] Verify v0.2 IPEX-LLM Support and Hotfix (#16)
# CHANGES - bump to `v0.2.0` --------- Co-authored-by: tjtanaa <[email protected]>
1 parent 6fad3e7 commit dc3805e

File tree

4 files changed

+28
-18
lines changed

4 files changed

+28
-18
lines changed

ellm_api_server.spec

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@ if backend in ('directml', 'cpu', 'cuda'):
6161
add_package('onnxruntime')
6262
add_package('onnxruntime_genai')
6363
elif backend == 'ipex':
64+
print(f"Backend IPEX")
6465
add_package('ipex_llm')
6566
add_package('torch')
6667
add_package('torchvision')

setup.py

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def run(self):
6060
"install",
6161
"--pre",
6262
"--upgrade",
63-
"ipex-llm[xpu]",
63+
"ipex-llm[xpu]==2.1.0b20240731",
6464
"--extra-index-url",
6565
"https://pytorch-extension.intel.com/release-whl/stable/xpu/us/",
6666
],
@@ -74,6 +74,18 @@ def run(self):
7474
text=True,
7575
)
7676

77+
result = subprocess.run(
78+
["pip", "uninstall", "numpy", "-y"],
79+
capture_output=True,
80+
text=True,
81+
)
82+
83+
result = subprocess.run(
84+
["pip", "install", "numpy==1.26.4", "--no-cache"],
85+
capture_output=True,
86+
text=True,
87+
)
88+
7789
if _is_directml():
7890
result = subprocess.run(
7991
["conda", "install", "conda-forge::vs2015_runtime", "-y"],
@@ -93,7 +105,7 @@ def run(self):
93105
"install",
94106
"--pre",
95107
"--upgrade",
96-
"ipex-llm[xpu]",
108+
"ipex-llm[xpu]==2.1.0b20240731",
97109
"--extra-index-url",
98110
"https://pytorch-extension.intel.com/release-whl/stable/xpu/us/",
99111
],
@@ -107,6 +119,18 @@ def run(self):
107119
text=True,
108120
)
109121

122+
result = subprocess.run(
123+
["pip", "uninstall", "numpy", "-y"],
124+
capture_output=True,
125+
text=True,
126+
)
127+
128+
result = subprocess.run(
129+
["pip", "install", "numpy==1.26.4", "--no-cache"],
130+
capture_output=True,
131+
text=True,
132+
)
133+
110134
if _is_directml():
111135
result = subprocess.run(
112136
["conda", "install", "conda-forge::vs2015_runtime", "-y"],

src/embeddedllm/entrypoints/api_server.py

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -99,21 +99,6 @@ def main():
9999

100100
print(f"PATH: {os.environ['PATH']}")
101101

102-
os.environ["SYCL_CACHE_PERSISTENT"] = "1"
103-
os.environ["BIGDL_LLM_XMX_DISABLED"] = "1"
104-
import sys
105-
106-
if getattr(sys, "frozen", False):
107-
# Running as compiled executable
108-
bundle_dir = sys._MEIPASS
109-
else:
110-
# Running in a normal Python environment
111-
bundle_dir = os.path.dirname(os.path.abspath(__file__))
112-
113-
os.environ["PATH"] = bundle_dir + os.pathsep + os.environ["PATH"]
114-
115-
print(f"PATH: {os.environ['PATH']}")
116-
117102
import uvicorn
118103

119104
if os.name == "nt":

src/embeddedllm/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "0.0.1"
1+
__version__ = "0.2.0"

0 commit comments

Comments (0)