Commit b10ebc8

Author: Bogdan Tsechoev (committed)

get models from backend, save current model in localStorage

1 parent 8320fb6 commit b10ebc8

File tree

ui/packages/platform/src/api/bot/getLLMModels.ts
ui/packages/platform/src/pages/Bot/SettingsDialog/SettingsDialog.tsx
ui/packages/platform/src/pages/Bot/hooks.ts
ui/packages/platform/src/pages/Bot/index.tsx
ui/packages/platform/src/pages/Bot/utils.ts
ui/packages/platform/src/types/api/entities/bot.ts

6 files changed: +137 -51 lines changed
ui/packages/platform/src/api/bot/getLLMModels.ts

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+ import {request} from "../../helpers/request";
+ import { LLMModel } from "../../types/api/entities/bot";
+
+ export const getLLMModels = async (): Promise<{ response: LLMModel[] | null; error: Response | null }> => {
+   const apiServer = process.env.REACT_APP_API_URL_PREFIX || '';
+
+   try {
+     const response = await request(`${apiServer}/llm_models`, {
+       method: 'GET',
+     });
+
+     if (!response.ok) {
+       return { response: null, error: response };
+     }
+
+     const responseData: LLMModel[] = await response.json();
+
+     return { response: responseData, error: null };
+
+   } catch (error) {
+     return { response: null, error: error as Response };
+   }
+ }
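
The helper resolves to a { response, error } pair instead of throwing. A minimal consumption sketch (the loadModels wrapper is illustrative only, not part of this commit; import paths are as used from pages/Bot):

import { getLLMModels } from "../../api/bot/getLLMModels";
import { LLMModel } from "../../types/api/entities/bot";

// Fetch the model list; fall back to an empty array when the request fails.
const loadModels = async (): Promise<LLMModel[]> => {
  const { response, error } = await getLLMModels();
  if (error) {
    console.error('Failed to fetch LLM models', error);
    return [];
  }
  return response ?? [];
};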

ui/packages/platform/src/pages/Bot/SettingsDialog/SettingsDialog.tsx

Lines changed: 26 additions & 29 deletions
@@ -27,6 +27,7 @@ import { Spinner } from '@postgres.ai/shared/components/Spinner'
  import { colors } from "@postgres.ai/shared/styles/colors";
  import FormLabel from '@mui/material/FormLabel'
  import { Model } from '../hooks'
+ import { LLMModel } from "../../../types/api/entities/bot";

  export type Visibility = 'public' | 'private';

@@ -46,6 +47,7 @@ type PublicChatDialogProps = {
    onSaveChanges: SaveChangesFunction
    isLoading: boolean
    threadId: string | null
+   llmModels: LLMModel[] | null
  }

  const useDialogTitleStyles = makeStyles(

@@ -173,7 +175,8 @@ export const SettingsDialog = (props: PublicChatDialogProps) => {
      onClose,
      isOpen,
      isLoading,
-     threadId
+     threadId,
+     llmModels
    } = props;

    const [visibility, setVisibility] = useState<Visibility>(defaultVisibility);

@@ -273,38 +276,32 @@ export const SettingsDialog = (props: PublicChatDialogProps) => {
            label="Anyone with a special link and members of the organization can view"
          />
        </RadioGroup>
-       {/*{shareUrl.remark && (
-         <Typography className={classes.remark}>
-           <span className={classes.remarkIcon}>{icons.warningIcon}</span>
-           {shareUrl.remark}
-         </Typography>
-       )}*/}
        {visibility && (
          <div className={classes.urlContainer}>{urlField}</div>
        )}
      </>}
-     <FormLabel component="legend">Model</FormLabel>
-     <RadioGroup
-       aria-label="model"
-       name="model"
-       value={model}
-       onChange={(event) => {
-         setModel(event.target.value as Model)
-       }}
-       className={classes.radioLabel}
-     >
-       <FormControlLabel
-         value="gemini"
-         control={<Radio />}
-         label="gemini-1.5-pro"
-       />
-
-       <FormControlLabel
-         value="gpt"
-         control={<Radio />}
-         label="gpt-4-turbo"
-       />
-     </RadioGroup>
+     {llmModels && <>
+       <FormLabel component="legend">Model</FormLabel>
+       <RadioGroup
+         aria-label="model"
+         name="model"
+         value={model}
+         onChange={(event) => {
+           setModel(event.target.value as Model)
+         }}
+         className={classes.radioLabel}
+       >
+         {llmModels.map((model) =>
+           <FormControlLabel
+             key={`${model.vendor}/${model.name}`}
+             value={`${model.vendor}/${model.name}`}
+             control={<Radio />}
+             label={model.name}
+           />
+         )
+         }
+       </RadioGroup>
+     </>}
    </DialogContent>

    <DialogActions>
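
Instead of two hard-coded radios, the dialog now renders one FormControlLabel per entry in llmModels, using the "vendor/name" pair as both key and value so the selection maps directly onto the Model template-literal type from hooks.ts. A small illustration (the sample entry is hypothetical, not backend data):

import { LLMModel } from "../../../types/api/entities/bot";

// Hypothetical entry, only to show the value format the radios emit.
const entry: LLMModel = { name: 'gpt-4-turbo', vendor: 'oai', isThirdParty: true, freeUseAvailable: false };
const radioValue = `${entry.vendor}/${entry.name}`; // "oai/gpt-4-turbo" — the string handed to setModel(... as Model)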

ui/packages/platform/src/pages/Bot/hooks.ts

Lines changed: 74 additions & 11 deletions
@@ -5,23 +5,26 @@
   *--------------------------------------------------------------------------
   */

- import { Dispatch, SetStateAction, useCallback, useEffect, useState } from "react";
+ import { useCallback, useEffect, useState } from "react";
  import useWebSocket, {ReadyState} from "react-use-websocket";
  import { useLocation } from "react-router-dom";
- import {BotMessage} from "../../types/api/entities/bot";
+ import { BotMessage, LLMModel } from "../../types/api/entities/bot";
  import {getChatsWithWholeThreads} from "../../api/bot/getChatsWithWholeThreads";
  import {getChats} from "api/bot/getChats";
  import {useAlertSnackbar} from "@postgres.ai/shared/components/AlertSnackbar/useAlertSnackbar";
  import {localStorage} from "../../helpers/localStorage";
  import { makeChatPublic } from "../../api/bot/makeChatPublic";
- import { aiModelBuilder } from "./utils";
+ import { getLLMModels } from "../../api/bot/getLLMModels";


  const WS_URL = process.env.REACT_APP_WS_URL || '';

- export type Model = 'oai' | 'gcp'
+ export type Model = `${LLMModel['vendor']}/${LLMModel['name']}`

- export const DEFAULT_MODEL: Model = 'gcp'
+ const DEFAULT_MODEL_NAME = 'gemini-1.5-pro'
+ const DEFAULT_VENDOR = 'gcp';
+
+ export const DEFAULT_MODEL: Model = `${DEFAULT_VENDOR}/${DEFAULT_MODEL_NAME}`

  type ErrorType = {
    code?: number;

@@ -47,8 +50,9 @@ type UseAiBotReturnType = {
    changeChatVisibility: (threadId: string, isPublic: boolean) => void;
    isChangeVisibilityLoading: boolean;
    unsubscribe: (threadId: string) => void;
-   model: Model,
-   setModel: Dispatch<SetStateAction<Model>>
+   model: UseLLMModelsList['model'],
+   setModel: UseLLMModelsList['setModel']
+   llmModels: UseLLMModelsList['llmModels']
  }

  type UseAiBotArgs = {

@@ -60,14 +64,15 @@ type UseAiBotArgs = {
  export const useAiBot = (args: UseAiBotArgs): UseAiBotReturnType => {
    const { threadId, onChatLoadingError } = args;
    const { showMessage, closeSnackbar } = useAlertSnackbar();
+   const { llmModels, model, setModel } = useLLMModelsList();
    let location = useLocation<{skipReloading?: boolean}>();

    const [messages, setMessages] = useState<BotMessage[] | null>(null);
    const [isLoading, setLoading] = useState<boolean>(false);
    const [error, setError] = useState<ErrorType | null>(null);
    const [wsLoading, setWsLoading] = useState<boolean>(false);
    const [isChangeVisibilityLoading, setIsChangeVisibilityLoading] = useState<boolean>(false);
-   const [model, setModel] = useState<Model>(DEFAULT_MODEL);
+

    const token = localStorage.getAuthToken()

@@ -144,7 +149,6 @@ export const useAiBot = (args: UseAiBotArgs): UseAiBotReturnType => {
      subscribe(threadId)
      if (response && response.length > 0) {
        setMessages(response);
-       setModel(aiModelBuilder(response?.[response.length - 1]?.ai_model || ''))
      } else {
        if (onChatLoadingError) onChatLoadingError();
        setError({

@@ -239,7 +243,6 @@ export const useAiBot = (args: UseAiBotArgs): UseAiBotReturnType => {

    const clearChat = () => {
      setMessages(null);
-     setModel(DEFAULT_MODEL);
    }

    const changeChatVisibility = async (threadId: string, isPublic: boolean) => {

@@ -307,7 +310,8 @@ export const useAiBot = (args: UseAiBotArgs): UseAiBotReturnType => {
      messages,
      unsubscribe,
      model,
-     setModel
+     setModel,
+     llmModels
    }
  }

@@ -360,4 +364,63 @@ export const useBotChatsList = (orgId?: number): UseBotChatsListHook => {
      getChatsList,
      loading: isLoading
    }
+ }
+
+ type UseLLMModelsList = {
+   llmModels: LLMModel[] | null
+   error: Response | null
+   model: Model
+   setModel: (model: Model) => void
+ }
+
+ const useLLMModelsList = (): UseLLMModelsList => {
+   const [llmModels, setLLMModels] = useState<UseLLMModelsList['llmModels']>(null);
+   const [error, setError] = useState<Response | null>(null);
+   const [userModel, setUserModel] = useState<Model | null>(null)
+
+   const getModels = useCallback(async () => {
+     let models = null;
+     try {
+       const { response } = await getLLMModels();
+       setLLMModels(response)
+       const currentModel = window.localStorage.getItem('bot.llm_model')
+
+       if (currentModel && currentModel !== userModel) {
+         setUserModel(currentModel as Model)
+       } else if (!currentModel) {
+         setModel(DEFAULT_MODEL)
+       }
+     } catch (e) {
+       setError(e as unknown as Response)
+     }
+     return models
+   }, []);
+
+   useEffect(() => {
+     let isCancelled = false;
+
+     getModels()
+       .catch((e) => {
+         if (!isCancelled) {
+           setError(e);
+         }
+       });
+     return () => {
+       isCancelled = true;
+     };
+   }, [getModels]);
+
+   const setModel = (model: Model) => {
+     if (model !== userModel) {
+       setUserModel(model);
+       window.localStorage.setItem('bot.llm_model', model)
+     }
+   }
+
+   return {
+     llmModels,
+     error,
+     setModel,
+     model: userModel || DEFAULT_MODEL,
+   }
  }
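
The new useLLMModelsList hook is what persists the selection: setModel writes the "vendor/name" string to window.localStorage under the 'bot.llm_model' key, and getModels reads that key back after fetching the list, falling back to DEFAULT_MODEL ('gcp/gemini-1.5-pro') when nothing is stored. A standalone sketch of that fallback (the helper and constant names below are illustrative, not part of the commit):

const LLM_MODEL_STORAGE_KEY = 'bot.llm_model';
const DEFAULT_MODEL = 'gcp/gemini-1.5-pro';

// Prefer the persisted choice; otherwise use the default model.
const resolveCurrentModel = (): string => {
  return window.localStorage.getItem(LLM_MODEL_STORAGE_KEY) ?? DEFAULT_MODEL;
};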

ui/packages/platform/src/pages/Bot/index.tsx

Lines changed: 5 additions & 3 deletions
@@ -17,11 +17,11 @@ import { Messages } from './Messages/Messages';
  import { Command } from './Command/Command';
  import { ChatsList } from "./ChatsList/ChatsList";
  import { BotWrapperProps } from "./BotWrapper";
- import { useBotChatsList, useAiBot, Model } from "./hooks";
+ import { useBotChatsList, useAiBot } from "./hooks";
  import { usePrev } from "../../hooks/usePrev";
  import {HeaderButtons} from "./HeaderButtons/HeaderButtons";
  import settings from "../../utils/settings";
- import { SaveChangesFunction, SettingsDialog, Visibility } from "./SettingsDialog/SettingsDialog";
+ import { SaveChangesFunction, SettingsDialog } from "./SettingsDialog/SettingsDialog";
  import { theme } from "@postgres.ai/shared/styles/theme";
  import { colors } from "@postgres.ai/shared/styles/colors";
  import { SettingsWithLabel } from "./SettingsWithLabel/SettingsWithLabel";

@@ -105,7 +105,8 @@ export const BotPage = (props: BotPageProps) => {
    changeChatVisibility,
    unsubscribe,
    model,
-   setModel
+   setModel,
+   llmModels
  } = useAiBot({
    threadId: match.params.threadId,
  });

@@ -226,6 +227,7 @@ export const BotPage = (props: BotPageProps) => {
        onClose={toggleSettingsDialog}
        onSaveChanges={handleSaveSettings}
        threadId={match.params.threadId || null}
+       llmModels={llmModels}
      />
      <ChatsList
        isOpen={isChatsListVisible}

ui/packages/platform/src/pages/Bot/utils.ts

Lines changed: 1 addition & 7 deletions
@@ -6,15 +6,9 @@
   */

  import { API_URL_PREFIX } from "../../config/env";
- import { DEFAULT_MODEL, Model } from "./hooks";

  export const permalinkLinkBuilder = (id: string): string => {
    const apiUrl = process.env.REACT_APP_API_URL_PREFIX || API_URL_PREFIX;
    const isV2API = /https?:\/\/.*v2\.postgres\.ai\b/.test(apiUrl);
    return `https://${isV2API ? 'v2.' : ''}postgres.ai/chats/${id}`;
- };
-
-
- export const aiModelBuilder = (model: string): Model => {
-   return /oai|gpt/.test(model) ? "oai" : /gemini|gcp/.test(model) ? "gcp" : DEFAULT_MODEL;
- }
+ };

ui/packages/platform/src/types/api/entities/bot.ts

Lines changed: 8 additions & 1 deletion
@@ -14,4 +14,11 @@ export type BotMessage = {
    org_id: string
    thread_id: string
    ai_model: string
- }
+ }
+
+ export type LLMModel = {
+   name: string;
+   vendor: string;
+   isThirdParty: boolean;
+   freeUseAvailable: boolean;
+ };
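
Given this type, the /llm_models endpoint is expected to return a JSON array of such objects. An illustrative payload as the UI would consume it (the boolean values are assumptions; the actual backend response is not shown in this commit):

import { LLMModel } from "../../types/api/entities/bot"; // path as used from pages/Bot

const exampleModels: LLMModel[] = [
  { name: 'gemini-1.5-pro', vendor: 'gcp', isThirdParty: false, freeUseAvailable: true },
  { name: 'gpt-4-turbo', vendor: 'oai', isThirdParty: true, freeUseAvailable: false },
];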

0 commit comments