File tree: model-providers/gpu-llama3/runtime
4 files changed, +16 -4 lines

@@ -51,7 +51,7 @@
     <dependency>
       <groupId>io.quarkiverse.langchain4j</groupId>
       <artifactId>quarkus-langchain4j-gpu-llama3-deployment</artifactId>
-      <version>${quarkus-langchain4j.version}</version>
+      <version>999-SNAPSHOT</version>
       <type>pom</type>
       <scope>test</scope>
       <exclusions>
@@ -3,6 +3,3 @@ quarkus.langchain4j.gpu-llama3.chat-model.model-path=/Users/orion/LLMModels/beeh
 quarkus.langchain4j.gpu-llama3.enable-integration=true
 quarkus.langchain4j.gpu-llama3.chat-model.temperature=0.7
 quarkus.langchain4j.gpu-llama3.chat-model.max-tokens=100
-
-# Configure class loading
-quarkus.class-loading.parent-first-artifacts=org.graalvm:graal-sdk,io.github.beehive-lab:gpu-llama3
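Note: quarkus.class-loading.parent-first-artifacts is Quarkus' application-level switch for loading the listed artifacts from the parent class loader. With the extension apparently declaring the same artifacts parent-first in its own descriptor (see the runtime pom change further below), the per-application property is no longer needed. For reference, if an application did have to set it by hand, it would look like the removed lines (coordinates taken directly from this diff):

# application.properties: load the GraalVM SDK and GPU Llama3 jars parent-first
quarkus.class-loading.parent-first-artifacts=org.graalvm:graal-sdk,io.github.beehive-lab:gpu-llama3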
@@ -128,6 +128,11 @@
       <artifactId>quarkus-langchain4j-llama3-java</artifactId>
       <version>999-SNAPSHOT</version>
     </dependency>
+    <dependency>
+      <groupId>io.quarkiverse.langchain4j</groupId>
+      <artifactId>quarkus-langchain4j-gpu-llama3</artifactId>
+      <version>999-SNAPSHOT</version>
+    </dependency>
     <dependency>
       <groupId>io.quarkiverse.langchain4j</groupId>
       <artifactId>quarkus-langchain4j-mcp</artifactId>
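Outside this repository, where 999-SNAPSHOT is the locally built version, an application would normally import the project BOM and omit explicit versions. A minimal sketch, assuming the io.quarkiverse.langchain4j:quarkus-langchain4j-bom coordinates and a ${quarkus-langchain4j.version} property:

<dependencyManagement>
  <dependencies>
    <!-- Manages versions for all quarkus-langchain4j artifacts -->
    <dependency>
      <groupId>io.quarkiverse.langchain4j</groupId>
      <artifactId>quarkus-langchain4j-bom</artifactId>
      <version>${quarkus-langchain4j.version}</version>
      <type>pom</type>
      <scope>import</scope>
    </dependency>
  </dependencies>
</dependencyManagement>

<dependencies>
  <!-- GPU Llama3 model provider; version is supplied by the imported BOM -->
  <dependency>
    <groupId>io.quarkiverse.langchain4j</groupId>
    <artifactId>quarkus-langchain4j-gpu-llama3</artifactId>
  </dependency>
</dependencies>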
@@ -65,6 +65,16 @@
         </configuration>
       </execution>
     </executions>
+    <configuration>
+      <parentFirstArtifacts>
+        <parentFirstArtifact>org.graalvm:graal-sdk</parentFirstArtifact>
+        <parentFirstArtifact>io.github.beehive-lab:gpu-llama3</parentFirstArtifact>
+      </parentFirstArtifacts>
+      <runnerParentFirstArtifacts>
+        <runnerParentFirstArtifact>org.graalvm:graal-sdk</runnerParentFirstArtifact>
+        <runnerParentFirstArtifact>io.github.beehive-lab:gpu-llama3</runnerParentFirstArtifact>
+      </runnerParentFirstArtifacts>
+    </configuration>
   </plugin>
   <plugin>
     <artifactId>maven-compiler-plugin</artifactId>
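The added plugin-level <configuration> block lands on what is presumably the Quarkus extension plugin in the gpu-llama3 runtime pom; the plugin's own coordinates sit outside the visible hunk. A sketch of how the full declaration typically looks, assuming io.quarkus:quarkus-extension-maven-plugin with its extension-descriptor goal and a version managed by the parent pom:

<plugin>
  <groupId>io.quarkus</groupId>
  <artifactId>quarkus-extension-maven-plugin</artifactId>
  <executions>
    <execution>
      <phase>compile</phase>
      <goals>
        <!-- Writes the extension descriptor (META-INF/quarkus-extension.properties) -->
        <goal>extension-descriptor</goal>
      </goals>
      <configuration>
        <deployment>${project.groupId}:${project.artifactId}-deployment:${project.version}</deployment>
      </configuration>
    </execution>
  </executions>
  <configuration>
    <!-- Ask Quarkus to load these jars from the parent class loader, both in dev/test and in the runner -->
    <parentFirstArtifacts>
      <parentFirstArtifact>org.graalvm:graal-sdk</parentFirstArtifact>
      <parentFirstArtifact>io.github.beehive-lab:gpu-llama3</parentFirstArtifact>
    </parentFirstArtifacts>
    <runnerParentFirstArtifacts>
      <runnerParentFirstArtifact>org.graalvm:graal-sdk</runnerParentFirstArtifact>
      <runnerParentFirstArtifact>io.github.beehive-lab:gpu-llama3</runnerParentFirstArtifact>
    </runnerParentFirstArtifacts>
  </configuration>
</plugin>

The intent appears to be that the class-loading rules ship with the extension itself, so applications no longer need the quarkus.class-loading.parent-first-artifacts property that the earlier hunk removes from the sample application.properties.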