Created
February 1, 2024 18:04
-
-
Save psymbio/130849260736f7e14422a7bdfc441a21 to your computer and use it in GitHub Desktop.
OpenAI on JupyterLite (httpx custom transport with urllib3 — failed for streaming)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import micropip | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/multidict/multidict-4.7.6-py3-none-any.whl', keep_going=True) | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/frozenlist/frozenlist-1.4.0-py3-none-any.whl', keep_going=True) | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/aiohttp/aiohttp-3.9.1-py3-none-any.whl', keep_going=True) | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/openai/openai-1.3.7-py3-none-any.whl', keep_going=True) | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/urllib3/urllib3-2.1.0-py3-none-any.whl', keep_going=True) | |
await micropip.install("ssl") | |
import ssl | |
await micropip.install("httpx", keep_going=True) | |
import httpx | |
await micropip.install('https://raw.githubusercontent.com/psymbio/pyodide_wheels/main/urllib3/urllib3-2.1.0-py3-none-any.whl', keep_going=True) | |
import urllib3 | |
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | |
import json | |
class URLLib3Transport(httpx.BaseTransport):
    """Synchronous httpx transport that delegates I/O to urllib3.

    Useful in environments (e.g. Pyodide/JupyterLite) where httpx's default
    socket-based transport is unavailable but a urllib3 build works.
    """

    def handle_request(self, request: httpx.Request) -> httpx.Response:
        """Send *request* via urllib3 and return the reply as an httpx.Response.

        Fixes two defects in the original implementation:
        - it returned None (printing the body instead), which violates the
          httpx.BaseTransport contract and crashes every caller expecting a
          Response;
        - it round-tripped the body through
          ``json.loads(content.replace("'", '"'))``, which corrupts any valid
          JSON payload containing an apostrophe. The raw bytes are already a
          serialized request body, so they are forwarded unchanged.
        """
        urllib3_response = urllib3.request(
            request.method,
            str(request.url),
            headers=dict(request.headers),
            body=request.content,  # forward raw bytes; no lossy re-parse
            preload_content=False,
        )
        # NOTE(review): the body is read eagerly, so server-sent-event
        # streaming is still buffered; a true streaming transport would pass
        # an iterator as ``content`` instead.
        return httpx.Response(
            status_code=urllib3_response.status,
            headers=list(urllib3_response.headers.items()),
            content=urllib3_response.read(),
            request=request,
        )
# Route all OpenAI HTTP traffic through the urllib3-backed transport.
client = httpx.Client(transport=URLLib3Transport())

import openai
import os
from openai import AzureOpenAI, OpenAI

# Placeholder credential — substitute a real key before running.
os.environ['AZURE_OPENAI_API_KEY'] = "xxx"

openai_client = AzureOpenAI(
    azure_endpoint="https://xxx.openai.azure.com/",
    api_version="2023-07-01-preview",
    http_client=client,
)

# Single-turn prompt for the chat-completions endpoint.
chat_messages = [
    {
        "role": "user",
        "content": "sing me a song",
    },
]

# with_raw_response exposes the unparsed HTTP response alongside the parsed one.
response = openai_client.chat.completions.with_raw_response.create(
    model="gpt-35-turbo",
    messages=chat_messages,
    temperature=0.7,
    max_tokens=30,
)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment