Created
January 4, 2024 18:53
-
-
Save robert-mcdermott/ef4720ea2f26ed48570572cf5c956ac1 to your computer and use it in GitHub Desktop.
Query web pages with local LLMs using Ollama and Langchain
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# pip install chromadb==0.4.15  # need to pin to this version for current langchain version
from langchain.llms import Ollama
from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import GPT4AllEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA


def query_web_page(url, question, *,
                   base_url='http://localhost:11434',
                   model='zephyr:latest',
                   chunk_size=500,
                   chunk_overlap=0):
    """Answer a question about a web page using a local Ollama LLM (RAG).

    Loads the page at *url*, splits it into chunks, embeds the chunks into
    an in-memory Chroma vector store with GPT4All embeddings, and runs a
    RetrievalQA chain against the local Ollama model.

    Args:
        url: Web page to load as the knowledge source.
        question: Natural-language question to ask about the page.
        base_url: Ollama server endpoint (default: local instance).
        model: Ollama model name to use for generation.
        chunk_size: Character length of each document chunk.
        chunk_overlap: Characters of overlap between adjacent chunks.

    Returns:
        The chain's result dict (contains the query and the model's answer).

    Note: requires a running Ollama server and network access to *url*.
    """
    ollama = Ollama(base_url=base_url, model=model)
    data = WebBaseLoader(url).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size,
                                              chunk_overlap=chunk_overlap)
    all_splits = splitter.split_documents(data)
    # Vector store is built fresh in memory on every call — fine for a
    # one-shot script; persist the store if querying repeatedly.
    vectorstore = Chroma.from_documents(documents=all_splits,
                                        embedding=GPT4AllEmbeddings())
    qachain = RetrievalQA.from_chain_type(ollama,
                                          retriever=vectorstore.as_retriever())
    return qachain({"query": question})


if __name__ == "__main__":
    # Same behavior as the original flat script, but guarded so importing
    # this module no longer triggers network calls.
    result = query_web_page(
        'https://en.wikipedia.org/wiki/Fred_Hutchinson_Cancer_Center',
        "Who are the notable faculty?",
    )
    print(result)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.