@mcminis1
Created March 21, 2023 19:51
An example LLM workflow using mr_graph
import openai
import asyncio
import sys
import uvloop
from mr_graph.graph import Graph
# mr_graph >= 0.2.5
default_model_name = 'gpt-3.5-turbo'

async def get_answer(user_question: str, temp=0):
    """get an answer from openai

    Args:
        user_question (str): question from the user

    Returns
    -------
    completion : str
        LLM completion
    """
    messages = [{"role": "user", "content": user_question}]
    completion = await openai.ChatCompletion.acreate(
        model=default_model_name, messages=messages, temperature=temp
    )
    return completion['choices'][0]['message']['content']

def format_answer(user_question: str, completion: str):
    """parse the answer

    Args:
        user_question (str): user question sent to the LLM. might be needed to determine formatting.
        completion (str): LLM completion.

    Returns
    -------
    answer : str
        cleaned-up LLM completion
    """
    answer = completion.strip(' \n').removeprefix(
        'As an AI language model, I cannot provide a specific answer to this question'
    ).strip()
    return answer

async def get_structured_answer(user_question: str):
    """get answer + structure it

    Args:
        user_question (str): user question sent to the LLM

    Returns
    -------
    answer : str
        formatted LLM completion
    """
    llm = Graph(nodes=[get_answer, format_answer])
    q = llm.input(name='user_question')
    o1 = llm.get_answer(q)
    llm.outputs = llm.format_answer(q, o1)
    a = await llm(user_question=user_question)
    return a.answer

async def summarize_answers(answers: list[str], temp=0):
    """summarize answers

    Args:
        answers (list[str]): answers sent to the LLM for summary

    Returns
    -------
    summary : str
        LLM completion
    """
    nl = "\n"
    prompt = f"""
summarize the following text.
{nl.join(answers)}
"""
    messages = [{"role": "user", "content": prompt}]
    completion = await openai.ChatCompletion.acreate(
        model=default_model_name, messages=messages, temperature=temp
    )
    return completion['choices'][0]['message']['content']

async def get_summarized_q_and_a(questions: list[str]):
    """ask a bunch of questions, get answers, summarize them.

    Args:
        questions (list[str]): user questions sent to the LLM

    Returns
    -------
    summary : str
        LLM completion
    """
    llm = Graph(nodes=[get_structured_answer, summarize_answers])
    answers = llm.aggregator(name="answers")
    for question in questions:
        sa = llm.get_structured_answer(user_question=question)
        answers += sa.answer
    llm.outputs = llm.summarize_answers(answers=answers)
    v = await llm(answers)
    return v.summary

async def main():
    questions = [
        'who is abraham lincoln?',
        'what did abraham lincoln do?',
        'when was abraham lincoln alive?',
        'where did abraham lincoln live?',
        'why is abraham lincoln famous?'
    ]
    r = await get_summarized_q_and_a(questions)
    print(r)

if __name__ == '__main__':
    if sys.version_info >= (3, 11):
        with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
            runner.run(main())
    else:
        uvloop.install()
        asyncio.run(main())
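
For reference, the graph wiring in get_summarized_q_and_a (fan out over the questions, aggregate the cleaned-up answers, then summarize) is roughly equivalent to the plain-asyncio sketch below. It reuses the functions defined above and only illustrates the data flow, not how mr_graph actually schedules the calls; summarize_without_graph is a hypothetical helper, not part of the gist.

async def summarize_without_graph(questions: list[str]) -> str:
    # Illustrative sketch: the same fan-out/summarize flow without mr_graph.
    completions = await asyncio.gather(*(get_answer(q) for q in questions))
    answers = [format_answer(q, c) for q, c in zip(questions, completions)]
    return await summarize_answers(answers)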
@mcminis1 (Author) commented:

Returns:
Abraham Lincoln was the 16th President of the United States, serving from 1861 until his assassination in 1865. He is known for leading the country through its Civil War, preserving the Union, and ending slavery. He issued the Emancipation Proclamation, delivered the Gettysburg Address, signed the Homestead Act, established the first income tax and Department of Agriculture, and promoted the idea of a transcontinental railroad. He is widely regarded as one of the greatest American presidents and was assassinated by John Wilkes Booth in 1865.
