I’m attempting to build a context-aware chatbot using RunnableWithMessageHistory backed by InMemoryChatMessageHistory. The retriever uses a Chroma DB to supply context for the LLM's answers. However, I’m encountering an error and need help resolving it.
This code runs successfully and builds the retrieval chain:
def get_chain(vs):
llm = ChatOpenAI(model='gpt-3.5-turbo', temperature=0.3)
stuff_doc_prompt_template = ChatPromptTemplate.from_messages(
[
('system',
'Answer the question based on context: {context}'),
('human', '{input}')
]
)
stuff_doc_chain = create_stuff_documents_chain(
llm=llm,
prompt=stuff_doc_prompt_template
)
hist_prompt = (
"Given a chat history and the latest user question "
"which might reference context in the chat history, "
"formulate a standalone question which can be understood "
"without the chat history. Do NOT answer the question, "
"just reformulate it if needed and otherwise return it as is."
)
hist_prompt_template = ChatPromptTemplate.from_messages(
[
('system', hist_prompt),
MessagesPlaceholder('chat_history'),
('human', '{input}')
]
)
hist_aware_retriever = create_history_aware_retriever(
llm=llm,
retriever=vs.as_retriever(),
prompt=hist_prompt_template
)
store = {}
def get_by_session_id(session_id: str) -> BaseChatMessageHistory:
if session_id not in store:
store[session_id] = InMemoryChatMessageHistory()
return store[session_id]
hist_runnable = RunnableWithMessageHistory(
runnable=hist_aware_retriever,
get_session_history=get_by_session_id,
input_messages_key='input',
history_messages_key='chat_history'
)
ret_chain = create_retrieval_chain(
retriever=hist_runnable,
combine_docs_chain=stuff_doc_chain
)
return ret_chain
def get_vector_store():
openai_embeddings = OpenAIEmbeddings()
vs = Chroma(persist_directory='data2',
embedding_function=openai_embeddings)
return vs
vs = get_vector_store()
chain = get_chain(vs)
I can use the chain to send the first question and get a good answer from the LLM:
question1 = 'What is DTC'
respon1 = chain.invoke(
{'input': question1},
config={'configurable': {'session_id': 'id1'}}
)
print(respon1.get('answer'))
However, the second query fails:
question2 = 'Explain the role of it'
respon2 = chain.invoke(
{'input': question2},
config={'configurable': {'session_id': 'id1'}}
)
print(respon2.get('answer'))
Here is the error message produced when sending the second query with the chain:
Traceback (most recent call last):
File "c:Usersbuka_DocumentsGenAI_projectschatchat_client_full.py", line 97, in <module>
respon2 = chain.invoke(
^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 4580, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 2497, in invoke
input = step.invoke(input, config, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablespassthrough.py", line 469, in invoke
return self._call_with_config(self._invoke, input, config, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 1593, in _call_with_config
context.run(
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesconfig.py", line 380, in call_func_with_variable_args
return func(input, **kwargs) # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablespassthrough.py", line 456, in _invoke
**self.mapper.invoke(
^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 3144, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 3144, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libconcurrentfutures_base.py", line 456, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libconcurrentfutures_base.py", line 401, in __get_result
raise self._exception
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libconcurrentfuturesthread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 4580, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 4580, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 4580, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 2499, in invoke
input = step.invoke(input, config)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbranch.py", line 212, in invoke
output = self.default.invoke(
^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 4580, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbranch.py", line 212, in invoke
output = self.default.invoke(
^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 2497, in invoke
input = step.invoke(input, config, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corepromptsbase.py", line 179, in invoke
return self._call_with_config(
^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesbase.py", line 1593, in _call_with_config
context.run(
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corerunnablesconfig.py", line 380, in call_func_with_variable_args
return func(input, **kwargs) # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corepromptsbase.py", line 154, in _format_prompt_with_error_handling
return self.format_prompt(**_inner_input)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corepromptschat.py", line 765, in format_prompt
messages = self.format_messages(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corepromptschat.py", line 1142, in format_messages
message = message_template.format_messages(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_corepromptschat.py", line 235, in format_messages
value = convert_to_messages(value)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_coremessagesutils.py", line 296, in convert_to_messages
return [_convert_to_message(m) for m in messages]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_coremessagesutils.py", line 296, in <listcomp>
return [_convert_to_message(m) for m in messages]
^^^^^^^^^^^^^^^^^^^^^^
File "C:Usersbuka_AppDataLocalProgramsPythonPython311Libsite-packageslangchain_coremessagesutils.py", line 280, in _convert_to_message
raise NotImplementedError(f"Unsupported message type: {type(message)}")
NotImplementedError: Unsupported message type: <class 'langchain_core.documents.base.Document'>
Thanks in advance for any help!