1
- import React , { useState , useRef , useEffect } from "react" ;
2
- import "./App.css" ; // Import custom CSS for styling
1
+ import React , { useState , useRef , useEffect , Fragment } from "react" ;
2
+ import "./index.css" ;
3
+
4
// Base URL of the local OpenAI-compatible API server.
const host = "http://localhost:8080";

// Sampling temperature sent with every chat-completion request.
const temperature = 0.7;
3
6
4
7
const ChatGptInterface = ( ) => {
5
8
// Conversation history: array of { role: "user" | "assistant", content: string }.
const [messages, setMessages] = useState([]);
// Current contents of the text input box.
const [input, setInput] = useState("");
// True while a completion request is in flight / streaming.
const [isLoading, setIsLoading] = useState(false);
// Human-readable error message, or null when the last request succeeded.
const [error, setError] = useState(null);
// Models reported by the backend's /v1/models endpoint.
const [models, setModels] = useState([]);
// Partial assistant reply accumulated while the response streams in.
const [currentAssistantMessage, setCurrentAssistantMessage] = useState("");

// Scroll container for the message list (used to auto-scroll to the bottom).
const chatContainerRef = useRef(null);
16
// Mirror the text field's value into component state on every keystroke.
const handleInputChange = ({ target }) => {
  setInput(target.value);
};
15
19
16
/**
 * Send the conversation (plus the current input) to the chat-completions
 * endpoint with streaming enabled, accumulating the assistant's reply
 * token-by-token into `currentAssistantMessage`, then committing the full
 * reply to `messages` when the stream ends.
 */
const handleSubmit = async () => {
  // Show the user's message immediately, before the request resolves.
  setMessages((prevMessages) => [
    ...prevMessages,
    { role: "user", content: input },
  ]);

  setError(null);
  setIsLoading(true);

  try {
    const requestOptions = {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: selectedModel,
        // `messages` here is the pre-submit history; the new user turn is
        // appended explicitly since the setMessages above is async.
        messages: [...messages, { role: "user", content: input }],
        temperature,
        stream: true,
      }),
    };

    const response = await fetch(`${host}/v1/chat/completions`, requestOptions);
    if (!response.ok || !response.body) {
      throw new Error(`Request failed with status ${response.status}`);
    }

    const reader = response.body.getReader();
    // Single decoder with {stream:true} so multi-byte characters split
    // across chunks are decoded correctly.
    const decoder = new TextDecoder();
    let partialData = "";
    let done = false;
    let assistantResponse = "";

    while (!done) {
      const { value, done: readerDone } = await reader.read();
      done = readerDone;

      if (value) {
        partialData += decoder.decode(value, { stream: true });
        const lines = partialData.split("\n");
        // Keep the last (possibly incomplete) line for the next chunk.
        partialData = lines.pop();

        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const jsonStr = line.substring("data: ".length).trim();
          // The SSE stream terminates with a non-JSON "[DONE]" sentinel;
          // parsing it would throw and abort a successful stream.
          if (jsonStr === "" || jsonStr === "[DONE]") continue;

          let json;
          try {
            json = JSON.parse(jsonStr);
          } catch {
            continue; // skip malformed / keep-alive lines
          }

          const token = json?.choices?.[0]?.delta?.content;
          if (token !== undefined) {
            assistantResponse += token;
            setCurrentAssistantMessage(assistantResponse);
          }
        }
      }
    }

    // Commit the finished assistant reply to the permanent history.
    setMessages((prevMessages) => [
      ...prevMessages,
      { role: "assistant", content: assistantResponse },
    ]);

    // Clear the input field and the streaming buffer.
    setInput("");
    setCurrentAssistantMessage("");
  } catch (error) {
    console.error("Error:", error);
    setError("Failed to fetch response. Please try again: " + error.message);
  } finally {
    setIsLoading(false);
  }
};
72
104
73
105
// Fetch the list of available models once, on mount.
useEffect(() => {
  (async () => {
    try {
      const response = await fetch(`${host}/v1/models`);
      const data = await response.json();
      setModels(data?.data || []);
    } catch (error) {
      // Best-effort: the dropdown simply stays empty on failure.
      console.error("Error:", error);
    }
  })();
}, []);
88
117
89
118
// Track the model chosen in the <select> dropdown.
const handleModelChange = ({ target }) => {
  setSelectedModel(target.value);
};
92
121
93
// Model name sent with each completion request ("" until the user picks one).
const [selectedModel, setSelectedModel] = useState("");

// Keep the chat view pinned to the newest content, both when a message is
// committed and while the assistant reply is streaming in.
useEffect(() => {
  const container = chatContainerRef.current;
  if (container) {
    container.scrollTop = container.scrollHeight;
  }
}, [messages, currentAssistantMessage]);
130
+
131
+ const renderMessageContent = ( content ) => {
132
+ const parts = content . split ( "\n" ) ;
133
+ return parts . map ( ( part , index ) => (
134
+ < Fragment key = { index } >
135
+ { part }
136
+ { index < parts . length - 1 && < br /> }
137
+ </ Fragment >
138
+ ) ) ;
139
+ } ;
94
140
95
141
return (
96
142
< div className = "chat-page" >
@@ -122,9 +168,19 @@ const [selectedModel, setSelectedModel] = useState(""); // Added state for selec
122
168
< span className = "message-role" >
123
169
{ message . role === "user" ? "You" : "LocalAI" } :
124
170
</ span >
125
- < span className = "message-content" > { message . content } </ span >
171
+ < span className = "message-content" >
172
+ { renderMessageContent ( message . content ) }
173
+ </ span >
126
174
</ div >
127
175
) ) }
176
+ { isLoading && (
177
+ < div className = "chat-message assistant-message" >
178
+ < span className = "message-role" > LocalAI:</ span >
179
+ < span className = "message-content" >
180
+ { renderMessageContent ( currentAssistantMessage ) }
181
+ </ span >
182
+ </ div >
183
+ ) }
128
184
</ div >
129
185
</ div >
130
186
< div className = "chat-input" >
@@ -151,4 +207,4 @@ const [selectedModel, setSelectedModel] = useState(""); // Added state for selec
151
207
) ;
152
208
} ;
153
209
154
- export default ChatGptInterface ;
210
+ export default ChatGptInterface ;
0 commit comments