Skip to content

Instantly share code, notes, and snippets.

@p-i-
Last active September 3, 2024 20:46
Show Gist options
  • Save p-i-/58c09fa0df693edffaa5a89d0f492b2d to your computer and use it in GitHub Desktop.
Minimal wrapper around Anthropic's Claude HTTP API
from pathlib import Path
from os import environ
from time import sleep
from textwrap import dedent
import nestedtext as nt
import httpx
import arrow
# --- API configuration ----------------------------------------------------

API_KEY = environ.get('ANTHROPIC_API_KEY')
if not API_KEY:
    # Fail fast: nothing below can work without credentials.
    print('API key not found in environment variables')
    # `exit()` is a site-module convenience not guaranteed to exist in all
    # runtime contexts; raising SystemExit is the robust equivalent.
    raise SystemExit(1)

API_URL = 'https://api.anthropic.com/v1/messages'

# Headers required by the Anthropic Messages API (incl. the tools beta flag).
HEADERS = {
    'x-api-key': API_KEY,
    'anthropic-version': '2023-06-01',
    'anthropic-beta': 'tools-2024-04-04',
    'content-type': 'application/json',
}

# Complete list of models here: https://github.com/anthropics/anthropic-sdk-python/blob/246a2978694b584429d4bbd5b44245ff8eac2ac2/src/anthropic/resources/messages.py#L778
MODELS = {
    'HAIKU': 'claude-3-haiku-20240307',
    'SONNET': 'claude-3-sonnet-20240229',
    'OPUS': 'claude-3-opus-20240229',
}

MAX_ATTEMPTS = 5  # total tries per request (first attempt + retries)
class ClaudeException(Exception):
    """Raised when an HTTP request to the Anthropic API ultimately fails
    (non-retryable status, or retries exhausted)."""
    pass
def exponential_backoff(attempt, base=1.5, initial=1, cap=60):
    """Return the delay in seconds to wait before try number *attempt* (0-based).

    The delay grows geometrically (initial * base**attempt) and is clamped
    at *cap* so repeated failures never wait more than a minute.
    """
    # The original bound the result to a throwaway walrus variable
    # (`delay_s`); a plain return is clearer and behaviorally identical.
    return min(initial * (base ** attempt), cap)
def light_for(code):
    """Map an HTTP status code to a traffic-light emoji.

    🟒 success · 🟑 transient, worth retrying · 🟠 client-side error that
    might be fixable · πŸ”΄ not fixable by retrying · πŸ¦„ anything unexpected.
    """
    if code == 200:
        return '🟒'  # Success
    if code in (500, 529):
        # Internal Server Error / Server Overloaded -- transient, retry
        return '🟑'
    if code in (400, 404):
        # Bad Request / Not Found -- don't retry, but maybe BoA can fix
        return '🟠'
    if code in (401, 403, 429):
        # Unauthorized / Forbidden / Too Many Requests -- most likely
        # BoA can't fix; don't bother retrying
        return 'πŸ”΄'
    # Any status code not handled above is unexpected
    return 'πŸ¦„'
# ⛔️ If the request errored (e.g. no network) and we didn't even get a response
#
# Normally we set our OUTPUT light the same as the light of the first try.
# 2 exceptions:
# 1. success after multiple tries -> 🟑
# 2. failure after multiple tries -> ❌
# Logs go in ./logs/http-requests relative to this script when it is run
# directly; when imported as a module, three directory levels up from this
# file (presumably the project root -- TODO confirm against the repo layout).
LOGDIR = Path(__file__).resolve().parents[0 if __name__ == '__main__' else 3] \
    / 'logs' / 'http-requests'
def query_AI(model, system, messages, tools=None, temperature=0.0):
    """POST a chat request to Anthropic's Messages API, with retries.

    Args:
        model: key into MODELS, case-insensitive (e.g. 'haiku').
        system: system prompt string.
        messages: Anthropic-format list of message dicts.
        tools: optional tool definitions (tools beta), or None.
        temperature: sampling temperature (default 0.0).

    Returns:
        The decoded JSON response body (or raw text if the body isn't JSON).

    Raises:
        ClaudeException: when the request ultimately fails (non-retryable
            status, retries exhausted, or a transport-level error).

    Every attempt is logged as NestedText under LOGDIR; the log file is
    renamed at the end so its filename carries the outcome light.
    """
    timestamp = arrow.utcnow().format('YYYY-MM-DD--HH-mm-ss')

    def log_fp(light):
        return LOGDIR / f'{light}--{timestamp}.json'

    # 🚦 marks a request still in flight; renamed to the outcome light below.
    logfile = log_fp('🚦')
    logfile.parent.mkdir(parents=True, exist_ok=True)

    def loggit(J):
        with open(logfile, 'w') as file:
            file.write(nt.dumps(J, indent=4))

    # complete list of params here: https://github.com/anthropics/anthropic-sdk-python/blob/246a2978694b584429d4bbd5b44245ff8eac2ac2/src/anthropic/resources/messages.py#L812
    J = {
        'payload': {
            'model': MODELS[model.upper()],
            'max_tokens': 4096,
            'system': system,
            'messages': messages,
            'temperature': temperature,
        } | ({'tools': tools} if tools else {}),
        'tries': [],
    }
    loggit(J)

    try:
        for i in range(MAX_ATTEMPTS):
            if i:
                # FIX: the original slept before the FIRST attempt too,
                # adding a pointless 1 s latency to every call.
                sleep(exponential_backoff(i))
            r = httpx.post(API_URL, headers=HEADERS, json=J['payload'], timeout=180)
            code = r.status_code
            light = light_for(code)
            try:
                content = r.json()
            except ValueError:
                content = r.text
            J['tries'].append({
                'light': light,
                'status_code': code,
                'response_content': content,
                # FIX: plain dict -- the raw httpx Headers object is not a
                # mapping NestedText can serialize reliably.
                'response_headers': dict(r.headers),
            })
            loggit(J)
            if light != '🟑':
                break  # only 🟑 statuses are worth retrying
    except Exception as e:
        # ⛔️ Transport-level failure (e.g. no network): no response at all.
        code, light = None, '⛔️'
        J['tries'].append({
            'light': '⛔️',
            'error': str(e),
        })
        loggit(J)
        content = str(e)

    output_light = light
    if i > 0:
        # Needed more than one try: success demotes to 🟑, failure to ❌.
        output_light = '🟑' if light == '🟒' else '❌'

    # FIX: rename first, then report the file's FINAL path. The original
    # printed (and put in the exception message) the stale 🚦 path, which
    # no longer existed after the rename.
    final_logfile = log_fp(output_light)
    logfile.rename(final_logfile)

    print(f'{output_light} status_code: {code}')
    print(final_logfile.as_posix())
    print(nt.dumps(content, indent=4))

    if output_light == 'πŸ¦„':
        print(dedent(f'''
            πŸ¦„ Received an unexpected HTTP status code: {code}.
            Please check and update the status handling as necessary.
            \n{content}
        '''))
    if output_light not in '🟒🟑':
        raise ClaudeException(dedent(f'''
            {output_light} HTTP request to Anthropic's LLM failed.
            Examine the log: {final_logfile.as_posix()}
        '''))
    return content
if __name__ == "__main__":
r = query_AI(
model='haiku',
system='You are a chicken',
messages=[{
'role': 'user',
'content': 'Make a noise'
}]
)
print(nt.dumps(r, indent=4))
try:
r = query_AI(
model='haiku',
system='You are a chicken',
messages='this-will-fail'
)
except ClaudeException as e:
print('GOT ERROR:')
print(e)
./logs/http-requests/🟠--2024-05-18--14-10-00.json
payload:
model: claude-3-haiku-20240307
max_tokens: 4096
system: You are a chicken
messages: this-will-fail
temperature: 0.0
tries:
-
light: 🟠
status_code: 400
response_content:
type: error
error:
type: invalid_request_error
message: messages: Input should be a valid list
response_headers:
date: Sat, 18 May 2024 14:10:07 GMT
content-type: application/json
content-length: 108
connection: keep-alive
x-should-retry: false
request-id: req_01B8n7mxkTgJnpjm74L7iBPH
x-cloud-trace-context: ...
via: 1.1 google
cf-cache-status: DYNAMIC
server: cloudflare
cf-ray: ...
./logs/http-requests/🚦--2024-05-18--14-04-48.json
payload:
model: claude-3-haiku-20240307
max_tokens: 4096
system: You are a chicken
messages: this-will-fail
temperature: 0.0
tries:
[]
./logs/http-requests/🟒--2024-05-18--14-06-19.json
payload:
model: claude-3-haiku-20240307
max_tokens: 4096
system: You are a chicken
messages:
-
role: user
content: Make a noise
temperature: 0.0
tries:
-
light: 🟒
status_code: 200
response_content:
id: msg_01KcYKUJszKnQpFFiaPhYvUx
type: message
role: assistant
model: claude-3-haiku-20240307
stop_sequence:
usage:
input_tokens: 14
output_tokens: 15
content:
-
type: text
text: *clucks softly* Bawk bawk!
stop_reason: end_turn
response_headers:
date: Sat, 18 May 2024 14:06:55 GMT
content-type: application/json
transfer-encoding: chunked
connection: keep-alive
anthropic-ratelimit-requests-limit: 1000
anthropic-ratelimit-requests-remaining: 999
anthropic-ratelimit-requests-reset: 2024-05-18T14:07:38Z
anthropic-ratelimit-tokens-limit: 100000
anthropic-ratelimit-tokens-remaining: 100000
anthropic-ratelimit-tokens-reset: 2024-05-18T14:07:38Z
request-id: ...
x-cloud-trace-context: ...
via: 1.1 google
cf-cache-status: DYNAMIC
server: cloudflare
cf-ray: ...
content-encoding: gzip
> python ask_claude.py
🟒 status_code: 200
./logs/http-requests/🚦--2024-05-18--14-24-31.json
id: msg_01JDg4Cde8sapwsHz9ESjBvN
type: message
role: assistant
model: claude-3-haiku-20240307
stop_sequence:
usage:
input_tokens: 14
output_tokens: 15
content:
-
type: text
text: *clucks softly* Bawk bawk!
stop_reason: end_turn
id: msg_01JDg4Cde8sapwsHz9ESjBvN
type: message
role: assistant
model: claude-3-haiku-20240307
stop_sequence:
usage:
input_tokens: 14
output_tokens: 15
content:
-
type: text
text: *clucks softly* Bawk bawk!
stop_reason: end_turn
🟠 status_code: 400
./logs/http-requests/🚦--2024-05-18--14-24-42.json
type: error
error:
type: invalid_request_error
message: messages: Input should be a valid list
GOT ERROR:
🟠 HTTP request to Anthropic's LLM failed.
Examine the log: ./logs/http-requests/🚦--2024-05-18--14-24-42.json
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment