diff --git a/bedrock/invoke_claude_3.py b/bedrock/invoke_claude_3.py
index 2042a10..9245f64 100644
--- a/bedrock/invoke_claude_3.py
+++ b/bedrock/invoke_claude_3.py
@@ -14,7 +14,11 @@
MODEL_ID_TEXT = "anthropic.claude-3-5-sonnet-20240620-v1:0"
-SYSTEM_MESSAGE = "너는 사람들에게 친절하게 도움을 주는 구루미(Gurumi)야. 답변을 할때 참고한 문서가 있다면 링크도 알려줘."
+# Set up System messages
+PERSONAL_MESSAGE = os.environ.get(
+ "PERSONAL_MESSAGE", "당신은 친절하고 전문적인 AI 비서 입니다."
+)
+SYSTEM_MESSAGE = "답변을 할때 참고한 문서가 있다면 링크도 알려줘."
# Initialize the Amazon Bedrock runtime client
@@ -78,16 +82,14 @@ def main():
query = args.prompt
prompts = []
- prompts.append(
- "Human: You are a advisor AI system, and provides answers to questions by using fact based and statistical information when possible."
- )
- prompts.append(
- "If you don't know the answer, just say that you don't know, don't try to make up an answer."
- )
+ prompts.append("User: {}".format(PERSONAL_MESSAGE))
+ prompts.append("If you don't know the answer, just say that you don't know, don't try to make up an answer.")
if SYSTEM_MESSAGE != "None":
prompts.append(SYSTEM_MESSAGE)
+ prompts.append("<question> 태그로 감싸진 질문에 답변을 제공하세요.")
+
try:
# Add the question to the prompts
prompts.append("<question>")
@@ -96,7 +98,6 @@ def main():
prompts.append("</question>")
prompts.append("")
- # prompts.append("The response should be specific and use statistics or numbers when possible.")
prompts.append("Assistant:")
# Combine the prompts
diff --git a/bedrock/invoke_claude_3_image.py b/bedrock/invoke_claude_3_image.py
index 6a75f9e..3b80441 100644
--- a/bedrock/invoke_claude_3_image.py
+++ b/bedrock/invoke_claude_3_image.py
@@ -6,8 +6,6 @@
import boto3
import base64
-SYSTEM_MESSAGE = "너는 구름이(Gurumi) 야. 구름이는 한국어로 구름을 친숙하게 부르는 표현이야. AWSKRUG(AWS Korea User Group)의 마스코트지."
-
def parse_args():
p = argparse.ArgumentParser(description="invoke_claude_3")
@@ -62,9 +60,6 @@ def invoke_claude_3(prompt):
],
}
- if SYSTEM_MESSAGE:
- body["system"] = SYSTEM_MESSAGE
-
# print("request: {}".format(body))
response = bedrock.invoke_model(
diff --git a/bedrock/invoke_knowledge_base.py b/bedrock/invoke_knowledge_base.py
index fdf1a29..dba60bf 100644
--- a/bedrock/invoke_knowledge_base.py
+++ b/bedrock/invoke_knowledge_base.py
@@ -22,7 +22,11 @@
MODEL_ID_TEXT = "anthropic.claude-3-sonnet-20240229-v1:0"
-SYSTEM_MESSAGE = "너는 사람들에게 친절하게 도움을 주는 구루미(Gurumi)야. 답변을 할때 참고한 문서가 있다면 링크도 알려줘."
+# Set up System messages
+PERSONAL_MESSAGE = os.environ.get(
+ "PERSONAL_MESSAGE", "당신은 친절하고 전문적인 AI 비서 입니다."
+)
+SYSTEM_MESSAGE = "답변을 할때 참고한 문서가 있다면 링크도 알려줘."
# Initialize the Amazon Bedrock runtime client
@@ -133,23 +137,21 @@ def main():
query = args.prompt
prompts = []
- prompts.append(
- "Human: You are a advisor AI system, and provides answers to questions by using fact based and statistical information when possible."
- )
- prompts.append(
- "If you don't know the answer, just say that you don't know, don't try to make up an answer."
- )
+ prompts.append("User: {}".format(PERSONAL_MESSAGE))
+ prompts.append("If you don't know the answer, just say that you don't know, don't try to make up an answer.")
if SYSTEM_MESSAGE != "None":
prompts.append(SYSTEM_MESSAGE)
+ prompts.append("<question> 태그로 감싸진 질문에 답변을 제공하세요.")
+
try:
# Get the knowledge base contexts
if KNOWLEDGE_BASE_ID != "None":
contexts = invoke_knowledge_base(query)
prompts.append(
- "Use the following pieces of information to provide a concise answer to the question enclosed in <question> tags."
+ "<context> 에 정보가 제공 되면, 해당 정보를 사용하여 답변해 주세요."
)
prompts.append("<context>")
prompts.append("\n\n".join(contexts))
@@ -162,7 +164,6 @@ def main():
prompts.append("</question>")
prompts.append("")
- # prompts.append("The response should be specific and use statistics or numbers when possible.")
prompts.append("Assistant:")
# Combine the prompts
diff --git a/handler.py b/handler.py
index 912cbd6..91f8d00 100644
--- a/handler.py
+++ b/handler.py
@@ -12,8 +12,6 @@
from slack_bolt.adapter.aws_lambda import SlackRequestHandler
-BOT_CURSOR = os.environ.get("BOT_CURSOR", ":robot_face:")
-
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
# Set up Slack API credentials
@@ -47,6 +45,8 @@
MAX_LEN_SLACK = int(os.environ.get("MAX_LEN_SLACK", 3000))
MAX_LEN_BEDROCK = int(os.environ.get("MAX_LEN_BEDROCK", 4000))
+BOT_CURSOR = os.environ.get("BOT_CURSOR", ":robot_face:")
+
MSG_KNOWLEDGE = "지식 기반 검색 중... " + BOT_CURSOR
MSG_PREVIOUS = "이전 대화 내용 확인 중... " + BOT_CURSOR
MSG_RESPONSE = "응답 기다리는 중... " + BOT_CURSOR
@@ -307,8 +307,8 @@ def conversation(say: Say, thread_ts, query, channel, client_msg_id):
latest_ts = result["ts"]
prompts = []
- prompts.append("Human: {}".format(PERSONAL_MESSAGE))
- prompts.append("답변을 모르면 모른다고 하세요. 답을 지어내려고 하지 마세요.")
+ prompts.append("User: {}".format(PERSONAL_MESSAGE))
+ prompts.append("If you don't know the answer, just say that you don't know, don't try to make up an answer.")
if SYSTEM_MESSAGE != "None":
prompts.append(SYSTEM_MESSAGE)