from langchain.llms import OpenAI
from langchain_community.vectorstores import Chroma

# Load the persisted database from disk and use it as normal
# (persist_directory and embedding come from the indexing step sketched below)
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding)
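For context, the persisted store would have been built in an earlier cell roughly along these lines. This is a sketch, not the exact indexing code: the file name resumes.pdf, the directory "db", and the use of OpenAIEmbeddings are all assumptions.

from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_openai import OpenAIEmbeddings

persist_directory = "db"        # assumed location of the Chroma store
embedding = OpenAIEmbeddings()  # assumed embedding model

# Load and split the resumes, then persist their embeddings to disk
docs = PyPDFLoader("resumes.pdf").load()  # hypothetical source file
chunks = RecursiveCharacterTextSplitter(
    chunk_size=1000, chunk_overlap=100
).split_documents(docs)
vectordb = Chroma.from_documents(
    chunks, embedding=embedding, persist_directory=persist_directory
)
vectordb.persist()  # older Chroma integrations require an explicit persist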
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
qa = ConversationalRetrievalChain.from_llm(
    OpenAI(temperature=0.8),
    vectordb.as_retriever(search_kwargs={"k": 3}),
    memory=memory,
)
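Before wrapping this chain, it can be sanity-checked on its own. A minimal sketch, assuming the vector store actually contains resume documents (the question string is illustrative):

# With memory attached, the chain is called with just a question;
# chat_history is supplied by the ConversationBufferMemory.
result = qa({"question": "Which candidates know Python?"})
print(result["answer"])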
from langchain_core.runnables import ConfigurableFieldSpec
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai.chat_models import ChatOpenAI

llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.7)

# In-memory store of chat histories, keyed by (user_id, conversation_id)
store = {}

def get_session_history(user_id: str, conversation_id: str) -> BaseChatMessageHistory:
    if (user_id, conversation_id) not in store:
        store[(user_id, conversation_id)] = ChatMessageHistory()
    return store[(user_id, conversation_id)]
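As a quick sanity check of the factory (a sketch; the identifiers are illustrative), each (user_id, conversation_id) pair should get its own independent history:

h1 = get_session_history("user123", "conv456")
h2 = get_session_history("user123", "conv789")
h1.add_user_message("hello")
assert h1 is get_session_history("user123", "conv456")  # same history object is reused
assert len(h2.messages) == 0                            # other conversations are unaffected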
with_message_history = RunnableWithMessageHistory(
    qa,
    get_session_history=get_session_history,
    input_messages_key="question",
    history_messages_key="",
    history_factory_config=[
        ConfigurableFieldSpec(
            id="user_id",
            annotation=str,
            name="User ID",
            description="Unique identifier for the user.",
            default="",
            is_shared=True,
        ),
        ConfigurableFieldSpec(
            id="conversation_id",
            annotation=str,
            name="Conversation ID",
            description="Unique identifier for the conversation.",
            default="",
            is_shared=True,
        ),
    ],
)
user_id = "user123"
conversation_id = "conv456"
question = "List all the candidates in the resumes."

# Invoke the chain with the user's question and conversation context
response = with_message_history.invoke(
    {"question": question},
    config={"configurable": {"user_id": user_id, "conversation_id": conversation_id}},
)
response
But when I run this, I get the following error:
TypeError                                 Traceback (most recent call last)
in <cell line: 6>()
      4
      5 # Invoke the chain with the user's question and conversation context
----> 6 response = with_message_history.invoke(
      7     {"question": question},
      8     config={"configurable": {"user_id": user_id, "conversation_id": conversation_id}},

17 frames
/usr/local/lib/python3.10/dist-packages/tiktoken/core.py in encode(self, text, allowed_special, disallowed_special)
    114         if not isinstance(disallowed_special, frozenset):
    115             disallowed_special = frozenset(disallowed_special)
--> 116         if match := _special_token_regex(disallowed_special).search(text):
    117             raise_disallowed_special_token(match.group())
    118

TypeError: expected string or buffer
Can anyone help me with this error? It happens when I call invoke in Google Colab, and I have not been able to solve it.