Fixed async gather operation for posts search (#2298)
hlbmtc authored Feb 28, 2025
1 parent 6a1f724 commit c1ce38a
Showing 2 changed files with 8 additions and 6 deletions.
7 changes: 4 additions & 3 deletions posts/services/search.py
@@ -2,6 +2,7 @@
 import logging
 
 import numpy as np
+from asgiref.sync import async_to_sync
 from django.contrib.postgres.search import SearchVector, SearchQuery
 from django.db.models import Value, Case, When, FloatField, QuerySet
 from pgvector.django import CosineDistance
@@ -66,9 +67,9 @@ def update_post_search_embedding_vector(post: Post):
 
 
 def perform_post_search(qs, search_text: str):
-    embedding_vector, semantic_scores_by_id = asyncio.run(
-        gather_search_results(search_text)
-    )
+    embedding_vector, semantic_scores_by_id = async_to_sync(
+        gather_search_results
+    )(search_text)
     semantic_scores_by_id = semantic_scores_by_id or {}
 
     semantic_whens = [
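The search.py change swaps asyncio.run() for asgiref's async_to_sync, the bridge asgiref provides for calling a coroutine from synchronous code running under an ASGI server: asyncio.run() spins up a throwaway event loop and refuses to run when a loop is already active in the current thread, while async_to_sync cooperates with the surrounding event loop. A minimal, self-contained sketch of the call pattern; the stub coroutines are hypothetical stand-ins for the project's real embedding and semantic-score lookups:

import asyncio

from asgiref.sync import async_to_sync


async def fetch_embedding(search_text: str) -> list[float]:
    # Hypothetical stub for the real embedding call.
    return [0.0, 0.0, 0.0]


async def fetch_semantic_scores(search_text: str) -> dict[int, float]:
    # Hypothetical stub for the real semantic-score lookup.
    return {1: 0.5}


async def gather_search_results(search_text: str):
    # Run both lookups concurrently on the current event loop.
    return await asyncio.gather(
        fetch_embedding(search_text),
        fetch_semantic_scores(search_text),
    )


def perform_post_search_sketch(search_text: str):
    # async_to_sync turns the coroutine function into a plain synchronous
    # callable and manages the event loop for code served under ASGI, where
    # a bare asyncio.run() can fail with "cannot be called from a running
    # event loop".
    embedding_vector, semantic_scores_by_id = async_to_sync(
        gather_search_results
    )(search_text)
    return embedding_vector, semantic_scores_by_id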
7 changes: 4 additions & 3 deletions utils/openai.py
@@ -52,9 +52,10 @@ def generate_text_embed_vector(text: str) -> list[float]:
 
 
 async def generate_text_embed_vector_async(text: str) -> list[float]:
-    response = await get_openai_client_async().embeddings.create(
-        input=text, model=EMBEDDING_MODEL
-    )
+    async with get_openai_client_async() as client:
+        response = await client.embeddings.create(
+            input=text, model=EMBEDDING_MODEL
+        )
     vector = response.data[0].embedding
 
     return vector
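The utils/openai.py change enters the async client as an async context manager, so the HTTP resources behind the embeddings request are released when the block exits rather than lingering each time a fresh client is created. A rough, self-contained sketch of the same pattern; the EMBEDDING_MODEL value and the API-key handling are assumptions, not the project's actual configuration:

from openai import AsyncOpenAI

# Assumed model name for illustration; the project defines its own EMBEDDING_MODEL.
EMBEDDING_MODEL = "text-embedding-3-small"


def get_openai_client_async() -> AsyncOpenAI:
    # Sketch of the helper referenced in the diff; AsyncOpenAI reads
    # OPENAI_API_KEY from the environment by default.
    return AsyncOpenAI()


async def generate_text_embed_vector_async(text: str) -> list[float]:
    # "async with" closes the client's underlying HTTP connections when the
    # block exits, instead of leaving them open after every call.
    async with get_openai_client_async() as client:
        response = await client.embeddings.create(input=text, model=EMBEDDING_MODEL)
    return response.data[0].embedding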
