Can anyone tell me what’s wrong with this code?
def get_answer_from_text(document_id: str, question: str, conversation_history: List[dict]) -> str:
    try:
        Settings.embed_model = HuggingFaceEmbedding(
            model_name="BAAI/bge-small-en-v1.5"
        )
        reader = SimpleDirectoryReader(input_files=[os.path.join(UPLOAD_DIR, f"{document_id}.txt")])
        documents = reader.load_data()
        # Combine conversation history with the new question
        combined_text = "\n".join([f"Q: {entry['question']}\nA: {entry['answer']}" for entry in conversation_history])
        combined_text += f"\nQ: {question}"
        llm = HuggingFaceInferenceAPI(model_name="mistralai/Mistral-7B-Instruct-v0.3", token=access_token)
        Settings.llm = llm
        index = VectorStoreIndex.from_documents(
            documents,
            embed_model=embed_model, llm=llm
        )
        query_engine = index.as_query_engine(llm=llm)
        answer = query_engine.query(combined_text, top_k=1)
        print(answer)
I have tried using the HuggingFaceEmbedding model as well as the HuggingFaceInferenceAPI model, but it is not working either way. I keep getting the error: "embed_model" is not defined. Can someone please check this code and help me with this?
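For reference, here is a stripped-down, self-contained version of the pipeline I am trying to build. The import paths are for the recent split llama-index packages (llama-index-embeddings-huggingface and llama-index-llms-huggingface-api) and may differ by version; the upload directory and the Hugging Face token are placeholders for values defined elsewhere in my project:

import os
from typing import List

from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

# Placeholders for values defined elsewhere in my project
UPLOAD_DIR = "uploads"
access_token = os.environ.get("HF_TOKEN")


def get_answer_from_text(document_id: str, question: str, conversation_history: List[dict]) -> str:
    # Register the embedding model and LLM globally via Settings
    Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
    Settings.llm = HuggingFaceInferenceAPI(
        model_name="mistralai/Mistral-7B-Instruct-v0.3", token=access_token
    )

    # Load the uploaded document for this document_id
    reader = SimpleDirectoryReader(
        input_files=[os.path.join(UPLOAD_DIR, f"{document_id}.txt")]
    )
    documents = reader.load_data()

    # Fold the conversation history into the query text
    combined_text = "\n".join(
        f"Q: {entry['question']}\nA: {entry['answer']}" for entry in conversation_history
    )
    combined_text += f"\nQ: {question}"

    # Build the index and query it; the models are picked up from Settings
    index = VectorStoreIndex.from_documents(documents)
    query_engine = index.as_query_engine(similarity_top_k=1)
    answer = query_engine.query(combined_text)
    return str(answer)

In this version the models are only ever referenced through Settings, which is what I thought was happening in my original function as well.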