3 files changed, +7 −5 lines changed

Changed file 1 (pytest configuration, new file):

+[pytest]
+asyncio_default_fixture_loop_scope = function
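The asyncio_default_fixture_loop_scope = function setting tells pytest-asyncio to give async fixtures a per-test-function event loop by default; declaring it explicitly also silences the warning recent pytest-asyncio releases emit when the option is unset. A minimal sketch of a test module this configuration would govern (the fixture and test names are illustrative, not taken from this repository):

import asyncio

import pytest
import pytest_asyncio


# Under asyncio_default_fixture_loop_scope = function, this async fixture
# is set up on a fresh event loop for every test function that uses it.
@pytest_asyncio.fixture
async def fake_client():
    await asyncio.sleep(0)  # stand-in for asynchronous setup work
    return {"ready": True}


@pytest.mark.asyncio
async def test_fake_client_is_ready(fake_client):
    assert fake_client["ready"] is True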
Changed file 2:

@@ -6,10 +6,10 @@

 import yaml
 from fastapi import HTTPException
-from pythonjsonlogger import jsonlogger
+from pythonjsonlogger.json import JsonFormatter

 log_handler = logging.StreamHandler()
-log_handler.setFormatter(jsonlogger.JsonFormatter())
+log_handler.setFormatter(JsonFormatter())
 logging.basicConfig(level=logging.INFO, handlers=[log_handler])
 logger = logging.getLogger(__name__)

@@ -53,7 +53,7 @@ def load_responses(self) -> None:
     def get_response(self, prompt: str) -> str:
         """Get response for a given prompt."""
         self.load_responses()  # Check for updates
-        return self.responses.get(prompt, self.default_response)
+        return self.responses.get(prompt, self.default_response)

     def get_streaming_response(
         self, prompt: str, chunk_size: Optional[int] = None
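The import change in both Python files follows python-json-logger's newer package layout, where JsonFormatter is exposed from pythonjsonlogger.json and the older pythonjsonlogger.jsonlogger import path is being phased out in 3.x releases. A minimal sketch of the resulting logging setup, mirroring the lines touched above (the log message is illustrative):

import logging

from pythonjsonlogger.json import JsonFormatter  # python-json-logger 3.x layout

log_handler = logging.StreamHandler()
log_handler.setFormatter(JsonFormatter())
logging.basicConfig(level=logging.INFO, handlers=[log_handler])
logger = logging.getLogger(__name__)

# Each record is emitted as a single JSON object, e.g. {"message": "responses reloaded"}
logger.info("responses reloaded")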
Changed file 3:

@@ -4,7 +4,7 @@
 import tiktoken
 from fastapi import FastAPI, HTTPException
 from fastapi.responses import StreamingResponse
-from pythonjsonlogger import jsonlogger
+from pythonjsonlogger.json import JsonFormatter

 from .config import ResponseConfig
 from .models import (
@@ -20,7 +20,7 @@
 from .providers.openai import OpenAIProvider

 log_handler = logging.StreamHandler()
-log_handler.setFormatter(jsonlogger.JsonFormatter())
+log_handler.setFormatter(JsonFormatter())
 logging.basicConfig(level=logging.INFO, handlers=[log_handler])
 logger = logging.getLogger(__name__)

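If the package also has to run against older python-json-logger releases that predate the pythonjsonlogger.json module, a guarded import is one way to support both layouts; this is a hedged sketch of an alternative, not something this diff does:

try:
    # Newer python-json-logger releases expose the formatter here.
    from pythonjsonlogger.json import JsonFormatter
except ImportError:
    # Older releases only provide the legacy module path.
    from pythonjsonlogger.jsonlogger import JsonFormatter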