Skip to content

Commit 825cb8c

Browse files
uditkarode and Dhruvgera
authored and committed
chat: Add SSE streaming for Llama.cpp based models
1 parent 6369c75 commit 825cb8c

10 files changed

+239
-225
lines changed

public/favicon.ico

-3.78 KB
Binary file not shown.

src/App.css

-109
This file was deleted.

src/App.js

-13
This file was deleted.

src/App.test.js

-8
This file was deleted.

src/ChatGptInterface.js

+117-61
Original file line number | Diff line number | Diff line change
@@ -1,96 +1,142 @@
1-
import React, { useState, useRef, useEffect } from "react";
2-
import "./App.css"; // Import custom CSS for styling
1+
import React, { useState, useRef, useEffect, Fragment } from "react";
2+
import "./index.css";
3+
4+
const host = "http://localhost:8080";
5+
const temperature = 0.7;
36

47
const ChatGptInterface = () => {
58
const [messages, setMessages] = useState([]);
69
const [input, setInput] = useState("");
710
const [isLoading, setIsLoading] = useState(false);
811
const [error, setError] = useState(null);
9-
const [models, setModels] = useState([]); // Added state for models
12+
const [models, setModels] = useState([]);
13+
const [currentAssistantMessage, setCurrentAssistantMessage] = useState("");
1014
const chatContainerRef = useRef(null);
1115

1216
const handleInputChange = (e) => {
1317
setInput(e.target.value);
1418
};
1519

16-
const handleSubmit = async () => {
17-
// Reset error state and set loading state
18-
setError(null);
19-
setIsLoading(true);
20-
21-
try {
22-
const requestOptions = {
23-
method: "POST",
24-
headers: { "Content-Type": "application/json" },
25-
body: JSON.stringify({
26-
model: selectedModel, // Use selectedModel from state as model name
27-
messages: [
28-
...messages,
29-
{
30-
role: "user",
31-
content: input,
32-
},
33-
],
34-
temperature: 0.7,
35-
}),
36-
};
37-
38-
const response = await fetch(
39-
"http://localhost:8080/v1/chat/completions",
40-
requestOptions
41-
);
42-
43-
const data = await response.json();
44-
const assistantResponse =
45-
data?.choices?.[0]?.message?.content || "No response found";
46-
20+
const handleSubmit = async () => {
21+
// Add user input to messages
4722
setMessages((prevMessages) => [
4823
...prevMessages,
49-
{ role: "user", content: input }, // Append user input message
50-
{ role: "assistant", content: assistantResponse },
24+
{ role: "user", content: input },
5125
]);
5226

53-
// Clear input field
54-
setInput("");
55-
} catch (error) {
56-
console.error("Error:", error);
57-
setError("Failed to fetch response. Please try again: " + error.message); // Update error message
58-
} finally {
59-
// Set loading state to false after response or error is received
60-
setIsLoading(false);
61-
}
62-
};
27+
// Reset error state and set loading state
28+
setError(null);
29+
setIsLoading(true);
30+
31+
try {
32+
const requestOptions = {
33+
method: "POST",
34+
headers: { "Content-Type": "application/json" },
35+
body: JSON.stringify({
36+
model: selectedModel,
37+
messages: [
38+
...messages,
39+
{
40+
role: "user",
41+
content: input,
42+
},
43+
],
44+
temperature,
45+
stream: true,
46+
}),
47+
};
48+
49+
const response = await fetch(`${host}/v1/chat/completions`, requestOptions);
50+
51+
let data = "";
52+
const reader = response.body.getReader();
53+
let partialData = "";
54+
let done = false;
55+
let assistantResponse = "";
56+
57+
while (!done) {
58+
const { value, done: readerDone } = await reader.read();
59+
60+
done = readerDone;
61+
62+
if (value) {
63+
const chunk = new TextDecoder().decode(value);
64+
partialData += chunk;
65+
const lines = partialData.split("\n");
66+
67+
for (let i = 0; i < lines.length - 1; i++) {
68+
const line = lines[i];
69+
if (line.startsWith("data: ")) {
70+
const jsonStr = line.substring("data: ".length);
71+
const json = JSON.parse(jsonStr);
72+
73+
// Check if the response contains choices and delta fields
74+
if (json.choices && json.choices.length > 0 && json.choices[0].delta) {
75+
const token = json.choices[0].delta.content;
76+
if (token !== undefined) {
77+
assistantResponse += token;
78+
setCurrentAssistantMessage(assistantResponse);
79+
}
80+
}
81+
}
82+
}
83+
84+
partialData = lines[lines.length - 1];
85+
}
86+
}
6387

64-
// Scroll to the bottom of the chat container whenever a new message is added
65-
useEffect(() => {
66-
if (chatContainerRef.current) {
67-
chatContainerRef.current.scrollTop = chatContainerRef.current.scrollHeight;
88+
// Add assistant response to messages
89+
setMessages((prevMessages) => [
90+
...prevMessages,
91+
{ role: "assistant", content: assistantResponse },
92+
]);
93+
94+
// Clear input field and currentAssistantMessage
95+
setInput("");
96+
setCurrentAssistantMessage("");
97+
} catch (error) {
98+
console.error("Error:", error);
99+
setError("Failed to fetch response. Please try again: " + error.message);
100+
} finally {
101+
setIsLoading(false);
68102
}
69-
}, [messages]);
70-
71-
103+
};
72104

73105
useEffect(() => {
74-
// Fetch models on component mount
75106
const fetchModels = async () => {
76107
try {
77-
const response = await fetch(
78-
"http://localhost:8080/v1/models"
79-
);
108+
const response = await fetch(`${host}/v1/models`);
80109
const data = await response.json();
81110
setModels(data?.data || []);
82111
} catch (error) {
83112
console.error("Error:", error);
84113
}
85114
};
86115
fetchModels();
87-
}, []); // Empty dependency array to fetch models only on mount
116+
}, []);
88117

89118
const handleModelChange = (e) => {
90119
setSelectedModel(e.target.value);
91120
};
92121

93-
const [selectedModel, setSelectedModel] = useState(""); // Added state for selected model
122+
const [selectedModel, setSelectedModel] = useState("");
123+
124+
useEffect(() => {
125+
if (chatContainerRef.current) {
126+
chatContainerRef.current.scrollTop =
127+
chatContainerRef.current.scrollHeight;
128+
}
129+
}, [messages, currentAssistantMessage]);
130+
131+
const renderMessageContent = (content) => {
132+
const parts = content.split("\n");
133+
return parts.map((part, index) => (
134+
<Fragment key={index}>
135+
{part}
136+
{index < parts.length - 1 && <br />}
137+
</Fragment>
138+
));
139+
};
94140

95141
return (
96142
<div className="chat-page">
@@ -122,9 +168,19 @@ const [selectedModel, setSelectedModel] = useState(""); // Added state for selec
122168
<span className="message-role">
123169
{message.role === "user" ? "You" : "LocalAI"}:
124170
</span>
125-
<span className="message-content">{message.content}</span>
171+
<span className="message-content">
172+
{renderMessageContent(message.content)}
173+
</span>
126174
</div>
127175
))}
176+
{isLoading && (
177+
<div className="chat-message assistant-message">
178+
<span className="message-role">LocalAI:</span>
179+
<span className="message-content">
180+
{renderMessageContent(currentAssistantMessage)}
181+
</span>
182+
</div>
183+
)}
128184
</div>
129185
</div>
130186
<div className="chat-input">
@@ -151,4 +207,4 @@ const [selectedModel, setSelectedModel] = useState(""); // Added state for selec
151207
);
152208
};
153209

154-
export default ChatGptInterface;
210+
export default ChatGptInterface;

0 commit comments

Comments
 (0)