Skip to content

Instantly share code, notes, and snippets.

@darko-mesaros
Created August 8, 2024 21:09
Show Gist options
  • Save darko-mesaros/c6f7f34cd97f9c16c7e45482d3d4c26d to your computer and use it in GitHub Desktop.
import boto3
from InquirerPy import inquirer
# Runtime client used to invoke Amazon Bedrock models via the Converse API.
bedrock_client = boto3.client("bedrock-runtime")
# List of Bedrock model IDs the user can choose from.
# (The original comment called this a "dictionary" — it is a list.)
model_ids = [
    "mistral.mistral-7b-instruct-v0:2",
    "mistral.mistral-large-2402-v1:0",
    "mistral.mistral-large-2407-v1:0",
    "mistral.mixtral-8x7b-instruct-v0:1",
    "meta.llama3-70b-instruct-v1:0",
    "meta.llama3-8b-instruct-v1:0",
    "meta.llama3-1-8b-instruct-v1:0",
]
# Let the user fuzzy-search through the available model IDs.
selected_model_name = inquirer.fuzzy(
    message="Select a model:",
    choices=model_ids,
).execute()

# Collect the question to send to the model; a lone 'q' (any case) quits.
print("Enter your question (or 'q' to quit):")
question = input("> ")
if question.lower() == 'q':
    exit()
user_message = question
# System prompt steering the assistant's persona for every turn.
system_prompts = [
    {"text" : "You are an expert in all things Amazon Bedrock. You are a helpful assistant. But every explanation that you give uses food related puns."}
]

# Seed the conversation with a single user turn wrapping the question.
initial_turn = {
    "role": "user",
    "content": [{"text": user_message}],
}
conversation = [initial_turn]
try:
    # Stream the model's reply through the Bedrock Converse API.
    response = bedrock_client.converse_stream(
        modelId=selected_model_name,
        messages=conversation,
        system=system_prompts,
        inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
    )

    # Emit output as events arrive; an event may carry a role marker,
    # a chunk of generated text, or both, so each key is checked on its own.
    for event in response.get('stream') or []:
        if 'messageStart' in event:
            print(f"\nRole: {event['messageStart']['role']}")
        if 'contentBlockDelta' in event:
            print(event['contentBlockDelta']['delta']['text'], end="")
except Exception as e:
    print(f"Error: {e}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment