Commit

fix(llm): answer prompt None value effect (apache#116)

Fix the issue where the final result is not as expected when `answer_prompt` in the `/rag` API request body is `None` or an empty string.

---------

Co-authored-by: imbajin <[email protected]>
returnToInnocence and imbajin authored Nov 19, 2024
1 parent 943e750 commit 153d7f4
Showing 2 changed files with 4 additions and 4 deletions.
hugegraph-llm/src/hugegraph_llm/api/models/rag_requests.py — 4 changes: 2 additions & 2 deletions

@@ -30,7 +30,7 @@ class RAGRequest(BaseModel):
     rerank_method: Literal["bleu", "reranker"] = "bleu"
     near_neighbor_first: bool = False
     custom_priority_info: str = ""
-    answer_prompt: str = ""
+    answer_prompt: Optional[str] = None


 class GraphRAGRequest(BaseModel):
@@ -43,7 +43,7 @@ class GraphRAGRequest(BaseModel):
     rerank_method: Literal["bleu", "reranker"] = "bleu"
     near_neighbor_first: bool = False
     custom_priority_info: str = ""
-    answer_prompt: str = ""
+    answer_prompt: Optional[str] = None


 class GraphConfigRequest(BaseModel):
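A minimal sketch (not part of this commit) of how the new Optional field behaves in a Pydantic request model; the RAGRequest fields mirror the diff above, while the example instantiation at the bottom is purely illustrative:

# Sketch only: assumes Pydantic is installed; field names mirror rag_requests.py.
from typing import Literal, Optional

from pydantic import BaseModel


class RAGRequest(BaseModel):
    rerank_method: Literal["bleu", "reranker"] = "bleu"
    near_neighbor_first: bool = False
    custom_priority_info: str = ""
    # The old default was "", so an omitted field was indistinguishable from an
    # intentionally empty prompt; None lets the API detect "not provided".
    answer_prompt: Optional[str] = None


# A request body that omits answer_prompt now yields None instead of "".
req = RAGRequest()
print(req.answer_prompt)  # -> None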
hugegraph-llm/src/hugegraph_llm/api/rag_api.py — 4 changes: 2 additions & 2 deletions

@@ -27,7 +27,7 @@
     RerankerConfigRequest, GraphRAGRequest,
 )
 from hugegraph_llm.api.models.rag_response import RAGResponse
-from hugegraph_llm.config import settings
+from hugegraph_llm.config import settings, prompt
 from hugegraph_llm.utils.log import log


@@ -63,7 +63,7 @@ def rag_answer_api(req: RAGRequest):
         req.rerank_method,
         req.near_neighbor_first,
         req.custom_priority_info,
-        req.answer_prompt
+        req.answer_prompt or prompt.answer_prompt
     )
     return {
         key: value
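A self-contained sketch (not part of this commit) of the fallback that rag_answer_api now applies; DEFAULT_ANSWER_PROMPT stands in for the configured prompt.answer_prompt and is purely illustrative:

# Sketch only: demonstrates why "or" covers both the None and "" cases.
from typing import Optional

DEFAULT_ANSWER_PROMPT = "Answer the question using the retrieved context."  # stand-in value


def resolve_answer_prompt(answer_prompt: Optional[str]) -> str:
    # None and "" are both falsy, so an omitted or empty answer_prompt falls
    # back to the default template instead of being passed through verbatim.
    return answer_prompt or DEFAULT_ANSWER_PROMPT


print(resolve_answer_prompt(None))             # -> default template
print(resolve_answer_prompt(""))               # -> default template
print(resolve_answer_prompt("Custom prompt"))  # -> "Custom prompt"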
