mirror of https://github.com/SkalaraAI/langchain-chatbot.git
synced 2025-04-03 20:10:17 -04:00
55 lines · 2.0 KiB · Python
"""Create a ChatVectorDBChain for question/answering."""
|
|
from langchain.callbacks.manager import AsyncCallbackManager
|
|
from langchain.callbacks.tracers import LangChainTracer
|
|
from langchain.chains import ConversationalRetrievalChain
|
|
from langchain.chains.chat_vector_db.prompts import (CONDENSE_QUESTION_PROMPT,
|
|
QA_PROMPT)
|
|
from langchain.chains.llm import LLMChain
|
|
from langchain.chains.question_answering import load_qa_chain
|
|
from langchain.llms import OpenAI
|
|
from langchain.vectorstores.base import VectorStore
|
|
|
|
|
|
def get_chain(
    vectorstore: VectorStore, question_handler, stream_handler, tracing: bool = False
) -> ConversationalRetrievalChain:
    """Create a ConversationalRetrievalChain for question/answering.

    Wires up a two-stage chat pipeline: a non-streaming LLM condenses the
    follow-up question into a standalone one, and a streaming LLM produces
    the final answer over the retrieved documents.
    """
    # One callback manager per concern: the chain itself, question-condensing
    # events, and streamed answer tokens.
    chain_manager = AsyncCallbackManager([])
    condense_manager = AsyncCallbackManager([question_handler])
    answer_manager = AsyncCallbackManager([stream_handler])

    if tracing:
        # Attach a single tracer to every manager so all stages are traced
        # into the default LangChain session.
        tracer = LangChainTracer()
        tracer.load_default_session()
        for mgr in (chain_manager, condense_manager, answer_manager):
            mgr.add_handler(tracer)

    # LLM that rewrites the follow-up question into a standalone question.
    condense_llm = OpenAI(
        temperature=0.8,
        verbose=True,
        callback_manager=condense_manager,
    )
    # Streaming LLM that generates the final answer token by token.
    answer_llm = OpenAI(
        streaming=True,
        callback_manager=answer_manager,
        verbose=True,
        temperature=0.8,
    )

    question_generator = LLMChain(
        llm=condense_llm, prompt=CONDENSE_QUESTION_PROMPT, callback_manager=chain_manager
    )
    doc_chain = load_qa_chain(
        answer_llm, chain_type="stuff", prompt=QA_PROMPT, callback_manager=chain_manager
    )

    return ConversationalRetrievalChain(
        retriever=vectorstore.as_retriever(),
        combine_docs_chain=doc_chain,
        question_generator=question_generator,
        callback_manager=chain_manager,
        verbose=True
    )
|