Portkey Integration with Anything LLM #2659

Open
wants to merge 5 commits into base: master
12 changes: 12 additions & 0 deletions docker/.env.example
@@ -13,6 +13,18 @@ GID='1000'
# OPEN_AI_KEY=
# OPEN_MODEL_PREF='gpt-4o'


# Portkey Settings
# LLM_PROVIDER='portkey'
# PORTKEY_MODEL_PREF='gpt-3.5-turbo'
# PORTKEY_MODEL_TOKEN_LIMIT=4096
# PORTKEY_BASE_PATH='https://api.portkey.ai/v1'
# PORTKEY_API_KEY='pk-xxxxxxxx'
# PORTKEY_CONFIG_SLUG='my-config' # Optional: either this or PORTKEY_VIRTUAL_KEY is required
# PORTKEY_VIRTUAL_KEY='vk-xxxxxxxx' # Optional: either this or PORTKEY_CONFIG_SLUG is required



# LLM_PROVIDER='gemini'
# GEMINI_API_KEY=
# GEMINI_LLM_MODEL_PREF='gemini-pro'
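This changeset only adds the environment template; the server-side provider that reads these variables is not part of the diff. The sketch below shows one way they could feed an OpenAI-compatible client. The `openai` npm package and the `x-portkey-*` gateway header names are assumptions to verify against Portkey's documentation, not something this PR defines.

// Sketch only: wiring the PORTKEY_* env vars into an OpenAI-compatible client.
// Header names are assumed from Portkey's gateway docs and should be verified.
const { OpenAI } = require("openai");

function portkeyClient() {
  const headers = { "x-portkey-api-key": process.env.PORTKEY_API_KEY };
  // Either a saved config or a virtual key tells the gateway how to route.
  if (process.env.PORTKEY_CONFIG_SLUG)
    headers["x-portkey-config"] = process.env.PORTKEY_CONFIG_SLUG;
  if (process.env.PORTKEY_VIRTUAL_KEY)
    headers["x-portkey-virtual-key"] = process.env.PORTKEY_VIRTUAL_KEY;

  return new OpenAI({
    baseURL: process.env.PORTKEY_BASE_PATH ?? "https://api.portkey.ai/v1",
    apiKey: process.env.PORTKEY_API_KEY,
    defaultHeaders: headers,
  });
}

// Example usage with the configured model preference:
// const res = await portkeyClient().chat.completions.create({
//   model: process.env.PORTKEY_MODEL_PREF ?? "gpt-3.5-turbo",
//   messages: [{ role: "user", content: "Hello" }],
// });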
149 changes: 149 additions & 0 deletions frontend/src/components/LLMSelection/PortkeyOptions/index.jsx
@@ -0,0 +1,149 @@
import React, { useState } from 'react';

function PortkeyModelSelection({ settings }) {
// Static list of models supported by Portkey
const availableModels = [
{ id: "gpt-4", name: "GPT-4" },
{ id: "gpt-4-turbo", name: "GPT-4 Turbo" },
{ id: "gpt-3.5-turbo", name: "GPT-3.5 Turbo" },
{ id: "claude-2", name: "Claude 2" },
{ id: "claude-instant-1", name: "Claude Instant" },
{ id: "palm-2", name: "PaLM 2" },
{ id: "gemini-pro", name: "Gemini Pro" },
{ id: "mistral-medium", name: "Mistral Medium" },
{ id: "llama-2-70b", name: "Llama 2 70B" }
];

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Chat Model Selection
</label>
<select
name="PortkeyModelPref"
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
defaultValue={settings?.PortkeyModelPref || "gpt-3.5-turbo"}
>
<optgroup label="Available Models">
{availableModels.map((model) => (
<option
key={model.id}
value={model.id}
>
{model.name}
</option>
))}
</optgroup>
</select>
</div>
);
}

export default function PortkeyOptions({ settings }) {
const [basePathValue, setBasePathValue] = useState(settings?.PortkeyBasePath);
const [basePath, setBasePath] = useState(settings?.PortkeyBasePath);
const [apiKeyValue, setApiKeyValue] = useState(settings?.PortkeyAPIKey);
const [apiKey, setApiKey] = useState(settings?.PortkeyAPIKey);
const [configSlugValue, setConfigSlugValue] = useState(settings?.PortkeyConfigSlug);
const [virtualKeyValue, setVirtualKeyValue] = useState(settings?.PortkeyVirtualKey);

return (
<div className="w-full flex flex-col gap-y-4">
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Base URL
</label>
<input
type="url"
name="PortkeyBasePath"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="https://api.portkey.ai/v1"
defaultValue={settings?.PortkeyBasePath}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
/>
</div>
<PortkeyModelSelection settings={settings} />
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Token context window
</label>
<input
type="number"
name="PortkeyTokenLimit"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="4096"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.PortkeyTokenLimit}
required={true}
autoComplete="off"
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
API Key
</label>
</div>
<input
type="password"
name="PortkeyAPIKey"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="pk-xxxxx"
defaultValue={settings?.PortkeyAPIKey ? "*".repeat(20) : ""}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
Config Slug
<small className="text-gray-400">(Required if Virtual Key not provided)</small>
</label>
</div>
<input
type="text"
name="PortkeyConfigSlug"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="my-config-slug"
defaultValue={settings?.PortkeyConfigSlug || ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setConfigSlugValue(e.target.value)}
/>
</div>
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
Virtual Key
<small className="text-gray-400">(Required if Config Slug not provided)</small>
</label>
</div>
<input
type="text"
name="PortkeyVirtualKey"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="vk-xxxxx"
defaultValue={settings?.PortkeyVirtualKey || ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setVirtualKeyValue(e.target.value)}
/>
</div>
</div>
</div>
);
}
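The component above ships a hard-coded model list. If the configured Portkey endpoint answers an OpenAI-style GET /models request (an assumption, not something this diff verifies), the list could instead be fetched at runtime, following the pattern other option components in this repo use for custom base paths. A rough sketch:

// Sketch only: dynamic model listing against an OpenAI-compatible /models route.
// Returns an empty list on any failure so a static fallback can still render.
async function fetchPortkeyModels(basePath, apiKey) {
  try {
    const res = await fetch(`${basePath.replace(/\/$/, "")}/models`, {
      headers: { Authorization: `Bearer ${apiKey}` },
    });
    if (!res.ok) return [];
    const { data = [] } = await res.json();
    return data.map((model) => ({ id: model.id, name: model.id }));
  } catch {
    return [];
  }
}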
Binary file added frontend/src/media/llmprovider/portkey.png
@@ -93,6 +93,14 @@ const EMBEDDERS = [
options: (settings) => <VoyageAiOptions settings={settings} />,
description: "Run powerful embedding models from Voyage AI.",
},
{
name: "Portkey",
value: "portkey",
logo: PortkeyLogo,
options: (settings) => <PortkeyOptions settings={settings} />,
description: "Connect to various LLMs through Portkey's routing service.",
requiredConfig: ["PortkeyBasePath", "PortkeyAPIKey"],
},
{
name: "LiteLLM",
value: "litellm",
10 changes: 10 additions & 0 deletions frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -25,6 +25,7 @@ import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png";
import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PortkeyLogo from "@/media/llmprovider/portkey.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import APIPieLogo from "@/media/llmprovider/apipie.png";
@@ -46,6 +47,7 @@ import FireworksAiOptions from "@/components/LLMSelection/FireworksAiOptions";
import MistralOptions from "@/components/LLMSelection/MistralOptions";
import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
import PortkeyOptions from "@/components/LLMSelection/PortkeyOptions";
import OpenRouterOptions from "@/components/LLMSelection/OpenRouterOptions";
import GroqAiOptions from "@/components/LLMSelection/GroqAiOptions";
import CohereAiOptions from "@/components/LLMSelection/CohereAiOptions";
@@ -149,6 +151,14 @@ export const AVAILABLE_LLM_PROVIDERS = [
description: "Run open source models from Together AI.",
requiredConfig: ["TogetherAiApiKey"],
},
{
name: "Portkey",
value: "portkey",
logo: PortkeyLogo,
options: (settings) => <PortkeyOptions settings={settings} />,
description: "Connect to various LLMs through Portkey's routing service.",
requiredConfig: ["PortkeyBasePath", "PortkeyAPIKey"],
},
{
name: "Fireworks AI",
value: "fireworksai",
@@ -20,6 +20,7 @@ import GroqLogo from "@/media/llmprovider/groq.png";
import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png";
import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PortkeyLogo from "@/media/llmprovider/portkey.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import APIPieLogo from "@/media/llmprovider/apipie.png";
@@ -112,6 +113,13 @@ export const LLM_SELECTION_PRIVACY = {
],
logo: TogetherAILogo,
},
portkey: {
name: "Portkey AI",
description: [
"Your model and chats are not visible to Portkey when using the OpenSource Version",
],
logo: PortkeyLogo,
},
fireworksai: {
name: "FireworksAI",
description: [
@@ -19,6 +19,7 @@ import GroqLogo from "@/media/llmprovider/groq.png";
import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png";
import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PortkeyLogo from "@/media/llmprovider/portkey.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import APIPieLogo from "@/media/llmprovider/apipie.png";
@@ -57,6 +58,7 @@ import System from "@/models/system";
import paths from "@/utils/paths";
import showToast from "@/utils/toast";
import { useNavigate } from "react-router-dom";
import PortkeyOptions from "@/components/LLMSelection/PortkeyOptions";

const TITLE = "LLM Preference";
const DESCRIPTION =
@@ -202,6 +204,13 @@ const LLMS = [
options: (settings) => <LiteLLMOptions settings={settings} />,
description: "Run LiteLLM's OpenAI compatible proxy for various LLMs.",
},
{
name: "Portkey",
value: "portkey",
logo: PortkeyLogo,
options: (settings) => <PortkeyOptions settings={settings} />,
description: "Run Portkey OpenAI compatible proxy for various LLMs.",
},
{
name: "DeepSeek",
value: "deepseek",
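Note that every field name the options component submits (PortkeyBasePath, PortkeyModelPref, PortkeyTokenLimit, PortkeyAPIKey, PortkeyConfigSlug, PortkeyVirtualKey) still needs a matching entry in the server's env-key mapping for the settings form to persist anything; that file is not part of this diff. A sketch of what those entries could look like, assuming the envKey/checks shape used by other providers:

// Sketch only: server-side key mapping assumed to mirror other providers.
// Validation checks are left empty here; real entries would likely require
// a non-empty API key and a numeric token limit.
const PORTKEY_KEY_MAPPING = {
  PortkeyBasePath: { envKey: "PORTKEY_BASE_PATH", checks: [] },
  PortkeyModelPref: { envKey: "PORTKEY_MODEL_PREF", checks: [] },
  PortkeyTokenLimit: { envKey: "PORTKEY_MODEL_TOKEN_LIMIT", checks: [] },
  PortkeyAPIKey: { envKey: "PORTKEY_API_KEY", checks: [] },
  PortkeyConfigSlug: { envKey: "PORTKEY_CONFIG_SLUG", checks: [] },
  PortkeyVirtualKey: { envKey: "PORTKEY_VIRTUAL_KEY", checks: [] },
};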