Commit

Merge pull request #21 from nalbam/main
chore: Update conversation function to handle multiple contexts
nalbam authored Aug 27, 2024
2 parents 234ee29 + f2de139 commit c0afbdb
Showing 3 changed files with 194 additions and 131 deletions.
88 changes: 61 additions & 27 deletions bedrock/invoke_claude_3.py
@@ -4,8 +4,21 @@
 import argparse
 import json
 import boto3
+import os
 
-SYSTEM_MESSAGE = "너는 구름이(Gurumi) 야. 구름이는 한국어로 구름을 친숙하게 부르는 표현이야. AWSKRUG(AWS Korea User Group)의 마스코트지."
+AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
+
+ANTHROPIC_VERSION = os.environ.get("ANTHROPIC_VERSION", "bedrock-2023-05-31")
+ANTHROPIC_TOKENS = int(os.environ.get("ANTHROPIC_TOKENS", 1024))
+
+MODEL_ID_TEXT = "anthropic.claude-3-5-sonnet-20240620-v1:0"
+
+SYSTEM_MESSAGE = "너는 사람들에게 친절하게 도움을 주는 구루미(Gurumi)야. 답변을 할때 참고한 문서가 있다면 링크도 알려줘."
+
+
+# Initialize the Amazon Bedrock runtime client
+bedrock = boto3.client(service_name="bedrock-runtime", region_name=AWS_REGION)
 
 
 def parse_args():
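
This first hunk moves the hard-coded settings into module-level constants read from environment variables and creates the Bedrock runtime client once at import time. It also swaps the system prompt: the removed Korean string introduced the bot as "Gurumi", a friendly Korean word for cloud and the AWSKRUG (AWS Korea User Group) mascot, while the new Korean string roughly says "You are Gurumi, who kindly helps people; if you used any reference documents in your answer, include their links." Below is a minimal sketch of the same configuration pattern, with hypothetical override values shown purely for illustration:

import os

# Hypothetical overrides for illustration only; in practice these would be
# exported in the shell or deployment environment, not set in code.
os.environ["AWS_REGION"] = "ap-northeast-2"
os.environ["ANTHROPIC_TOKENS"] = "2048"

# Same lookup pattern as the constants above: take the environment value if
# present, otherwise fall back to the hard-coded default.
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
ANTHROPIC_VERSION = os.environ.get("ANTHROPIC_VERSION", "bedrock-2023-05-31")
ANTHROPIC_TOKENS = int(os.environ.get("ANTHROPIC_TOKENS", 1024))

print(AWS_REGION, ANTHROPIC_VERSION, ANTHROPIC_TOKENS)
# -> ap-northeast-2 bedrock-2023-05-31 2048
# (assuming ANTHROPIC_VERSION is not set in the surrounding environment)

Creating the client at module level means every call to invoke_claude_3() reuses one boto3 client instead of constructing a new one per request, which is exactly what the second hunk removes from the function body.
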
@@ -24,57 +37,78 @@ def invoke_claude_3(prompt):
     :return: Inference response from the model.
     """
 
-    # Initialize the Amazon Bedrock runtime client
-    bedrock = boto3.client(service_name="bedrock-runtime", region_name="us-east-1")
-
-    # Invoke Claude 3 with the text prompt
-    model_id = "anthropic.claude-3-sonnet-20240229-v1:0"
-
     try:
         body = {
-            "anthropic_version": "bedrock-2023-05-31",
-            "max_tokens": 1024,
+            "anthropic_version": ANTHROPIC_VERSION,
+            "max_tokens": ANTHROPIC_TOKENS,
             "messages": [
                 {
                     "role": "user",
-                    "content": [
-                        {
-                            "type": "text",
-                            "text": prompt,
-                        },
-                    ],
+                    "content": [{"type": "text", "text": prompt}],
                 },
             ],
         }
 
+        if SYSTEM_MESSAGE:
+            body["system"] = SYSTEM_MESSAGE
+
         # print("request: {}".format(body))
 
         response = bedrock.invoke_model(
-            modelId=model_id,
+            modelId=MODEL_ID_TEXT,
             body=json.dumps(body),
         )
 
         # Process and print the response
         body = json.loads(response.get("body").read())
 
-        # print("response: {}".format(body))
+        print("response: {}".format(body))
 
-        content = body.get("content", [])
+        result = body.get("content", [])
 
-        for output in content:
-            print(output["text"])
+        for output in result:
+            text = output["text"]
+
+        return text
 
     except Exception as e:
-        print("Error: {}".format(e))
+        print("invoke_claude_3: Error: {}".format(e))
+
+        raise e
 
 
 def main():
     args = parse_args()
 
-    invoke_claude_3(args.prompt)
+    query = args.prompt
+
+    prompts = []
+    prompts.append(
+        "Human: You are a advisor AI system, and provides answers to questions by using fact based and statistical information when possible."
+    )
+    prompts.append(
+        "If you don't know the answer, just say that you don't know, don't try to make up an answer."
+    )
+
+    if SYSTEM_MESSAGE != "None":
+        prompts.append(SYSTEM_MESSAGE)
+
+    try:
+        # Add the question to the prompts
+        prompts.append("")
+        prompts.append("<question>")
+        prompts.append(query)
+        prompts.append("</question>")
+        prompts.append("")
+
+        # prompts.append("The response should be specific and use statistics or numbers when possible.")
+        prompts.append("Assistant:")
+
+        # Combine the prompts
+        prompt = "\n".join(prompts)
+
+        # Send the prompt to Bedrock
+        message = invoke_claude_3(prompt)
+
+        print("conversation: message: {}".format(message))
+
+    except Exception as e:
+        print("conversation: error: {}".format(e))
 
 
 if __name__ == "__main__":
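
The second hunk makes invoke_claude_3() return the model's text (and re-raise on error) instead of only printing it, passes SYSTEM_MESSAGE through the request's system field, and rewrites main() so that several pieces of context are combined into one prompt: a general advisor instruction, a "don't make up answers" instruction, the optional SYSTEM_MESSAGE, and the user's question wrapped in <question> tags, followed by an "Assistant:" cue. A minimal sketch of the string the updated main() would assemble, using a hypothetical question and an English stand-in for the Korean SYSTEM_MESSAGE:

# Sketch only: mirrors the prompt assembly in the diff above.
# SYSTEM_MESSAGE and query are illustrative stand-ins, not values from the
# commit; the two instruction strings are copied verbatim from the diff.
SYSTEM_MESSAGE = "You are Gurumi, a friendly assistant; include links to referenced documents."
query = "What is AWSKRUG?"

prompts = [
    "Human: You are a advisor AI system, and provides answers to questions by using fact based and statistical information when possible.",
    "If you don't know the answer, just say that you don't know, don't try to make up an answer.",
]

if SYSTEM_MESSAGE != "None":
    prompts.append(SYSTEM_MESSAGE)

prompts += ["", "<question>", query, "</question>", "", "Assistant:"]

prompt = "\n".join(prompts)
print(prompt)
# Human: You are a advisor AI system, and provides answers to questions by using fact based and statistical information when possible.
# If you don't know the answer, just say that you don't know, don't try to make up an answer.
# You are Gurumi, a friendly assistant; include links to referenced documents.
#
# <question>
# What is AWSKRUG?
# </question>
#
# Assistant:

The Human:/Assistant: framing is the older Claude text-completion convention; the updated code still sends the combined string as a single user message through the Bedrock Messages API, with SYSTEM_MESSAGE additionally supplied in the body's system field. The question itself presumably arrives on the command line via parse_args(), whose body is collapsed in this view.
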
