Implement default in-memory store for ChatCompletionCache (#5188)
srjoglekar246 authored Jan 25, 2025
1 parent 6702985 commit 8926206
Showing 3 changed files with 11 additions and 8 deletions.
File 1 of 3:
@@ -1,8 +1,8 @@
 import asyncio
 import functools
+import warnings
 from textwrap import dedent
 from typing import Any, Callable, Sequence
-import warnings

 from pydantic import BaseModel
 from typing_extensions import Self
File 2 of 3 (the ChatCompletionCache implementation):
@@ -3,7 +3,7 @@
 import warnings
 from typing import Any, AsyncGenerator, List, Mapping, Optional, Sequence, Union, cast

-from autogen_core import CacheStore, CancellationToken
+from autogen_core import CacheStore, CancellationToken, InMemoryStore
 from autogen_core.models import (
     ChatCompletionClient,
     CreateResult,
@@ -74,11 +74,16 @@ async def main():
         client (ChatCompletionClient): The original ChatCompletionClient to wrap.
         store (CacheStore): A store object that implements get and set methods.
             The user is responsible for managing the store's lifecycle & clearing it (if needed).
+            Defaults to using in-memory cache.
     """

-    def __init__(self, client: ChatCompletionClient, store: CacheStore[CHAT_CACHE_VALUE_TYPE]):
+    def __init__(
+        self,
+        client: ChatCompletionClient,
+        store: Optional[CacheStore[CHAT_CACHE_VALUE_TYPE]] = None,
+    ):
         self.client = client
-        self.store = store
+        self.store = store or InMemoryStore[CHAT_CACHE_VALUE_TYPE]()

     def _check_cache(
         self,
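For context, here is a minimal usage sketch (not part of the commit) of what this change enables: wrapping a client without passing a store, so ChatCompletionCache falls back to the new in-memory default. It assumes only the public APIs that appear in this diff plus the standard ChatCompletionClient.create call; ReplayChatCompletionClient stands in for a real model client.

import asyncio

from autogen_core.models import UserMessage
from autogen_ext.models.cache import ChatCompletionCache
from autogen_ext.models.replay import ReplayChatCompletionClient


async def main() -> None:
    # Any ChatCompletionClient works here; the replay client just returns canned responses.
    base_client = ReplayChatCompletionClient(["Hello!"])

    # No store argument: the cache now constructs InMemoryStore[CHAT_CACHE_VALUE_TYPE]() itself.
    cached_client = ChatCompletionCache(base_client)

    messages = [UserMessage(content="Hi", source="user")]
    first = await cached_client.create(messages)   # forwarded to the wrapped client
    second = await cached_client.create(messages)  # served from the in-memory cache
    print(first.content, second.cached)


asyncio.run(main())

The second, identical request never reaches the wrapped client, which is why the test below can drop its explicit InMemoryStore setup.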
File 3 of 3 (the ChatCompletionCache tests):
@@ -2,15 +2,14 @@
 from typing import List, Tuple, Union

 import pytest
-from autogen_core import InMemoryStore
 from autogen_core.models import (
     ChatCompletionClient,
     CreateResult,
     LLMMessage,
     SystemMessage,
     UserMessage,
 )
-from autogen_ext.models.cache import CHAT_CACHE_VALUE_TYPE, ChatCompletionCache
+from autogen_ext.models.cache import ChatCompletionCache
 from autogen_ext.models.replay import ReplayChatCompletionClient


@@ -21,8 +20,7 @@ def get_test_data() -> Tuple[list[str], list[str], SystemMessage, ChatCompletion
     system_prompt = SystemMessage(content="This is a system prompt")
     replay_client = ReplayChatCompletionClient(responses)
     replay_client.set_cached_bool_value(False)
-    store = InMemoryStore[CHAT_CACHE_VALUE_TYPE]()
-    cached_client = ChatCompletionCache(replay_client, store)
+    cached_client = ChatCompletionCache(replay_client)

     return responses, prompts, system_prompt, replay_client, cached_client

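Because store is now Optional rather than removed, the explicit-store pattern deleted from the test still works. A sketch of that older call shape, assuming CHAT_CACHE_VALUE_TYPE remains exported from autogen_ext.models.cache:

from autogen_core import InMemoryStore
from autogen_ext.models.cache import CHAT_CACHE_VALUE_TYPE, ChatCompletionCache
from autogen_ext.models.replay import ReplayChatCompletionClient

# Explicitly constructed store, as the test previously did; any CacheStore implementation can be passed here.
store = InMemoryStore[CHAT_CACHE_VALUE_TYPE]()
cached_client = ChatCompletionCache(ReplayChatCompletionClient(["Hello!"]), store)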
