|
| 1 | +(ns llms |
| 2 | + (:require |
| 3 | + [org.httpkit.client :as hk-client] |
| 4 | + [cheshire.core :as json])) |
| 5 | + |
;; # Using Large Language Models from Clojure
;; LLMs often come as APIs, as they require computing power (GPUs), which most users do not have
;; locally.
;; OpenAI, for example, offers their models behind a (paid) API. In the following we will see three
;; different ways to use the GPT-4 model from OpenAI.
| 11 | + |
;; Get the OpenAI API key either from the environment or from a specific file.
(def open-ai-key
  (or (System/getenv "OPEN_AI_KEY")
      ;; `.trim` strips the trailing newline editors typically append to the
      ;; secret file — without it the "Bearer <key>" auth header built below
      ;; would be malformed and the API would reject the request.
      (.trim (slurp "open_ai_secret.txt"))))
| 18 | + |
;; ## Use the OpenAI API directly
;; OpenAI offers a rather simple API, text-in text-out, for "chatting" with GPT.
;;
;; The following shows how to ask a simple question and get the answer using an HTTP library,
;; [http-kit](https://github.com/http-kit/http-kit). The API is based on JSON, so it is easy to use
;; from Clojure.
| 25 | + |
| 26 | + |
;; POST a chat-completion request to OpenAI and decode the JSON body of the
;; response into Clojure data (keywordized keys).
(let [request  {:headers
                {"content-type" "application/json"
                 "authorization" (format "Bearer %s" open-ai-key)}
                :body
                (json/encode
                 {:model "gpt-4"
                  :messages [{:role "system",
                              :content "You are a helpful assistant."},
                             {:role "user",
                              :content "What is Clojure ?"}]})}
      response @(hk-client/post "https://api.openai.com/v1/chat/completions" request)]
  (json/decode (:body response) keyword))
| 41 | + |
;; ## Use Bosquet
;; [bosquet](https://github.com/zmedelis/bosquet) abstracts some of the concepts of LLMs
;; into a higher-level API. It further has notions of "memory" and "tools",
;; and has features we find, for example, in Python's "LangChain".
| 46 | + |
;; Bosquet wants the API key in a config file.
;; Write the key into the EDN config file that Bosquet reads its secrets from.
(->> {:openai {:api-key open-ai-key}}
     pr-str
     (spit "secrets.edn"))
| 51 | + |
| 52 | + |
| 53 | +(require '[bosquet.llm.generator :refer [generate llm]]) |
| 54 | + |
;; Ask the same question through Bosquet: the :user turn carries the prompt,
;; the :assistant turn declares which backend/model produces the answer.
(generate
 [[:user "What is Clojure"]
  [:assistant (llm :openai :llm/model-params {:model :gpt-4})]])
| 60 | + |
| 61 | + |
;; # Use langchain4j
;; We can use LLMs as well via Java interop and the library
;; [langchain4j](https://github.com/langchain4j/langchain4j), which aims
;; to be a copy of the Python LangChain, and offers support for
;; building blocks around several LLM concepts (models, vector stores, document loaders).
;; We will see it used in the following chapters.
| 68 | + |
| 69 | +(import '[dev.langchain4j.model.openai OpenAiChatModel OpenAiChatModelName]) |
| 70 | + |
;; For now just the simplest call to a GPT model, asking it the same question:
;; Build a langchain4j chat-model handle configured with our key and GPT-4.
(def open-ai-chat-model
  (-> (OpenAiChatModel/builder)
      (.apiKey open-ai-key)
      (.modelName OpenAiChatModelName/GPT_4)
      (.build)))
| 77 | + |
| 78 | + |
| 79 | +(.generate open-ai-chat-model "What is Clojure ?") |
0 commit comments