The LLMs are created using the WatsonxLLM class.
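Since the two model handles used below (flan_ul2_llm and flan_t5_llm) are not shown, here is a minimal sketch of how they might be set up; the langchain_ibm import path, model IDs, url, API key, and project_id are assumptions/placeholders rather than my actual configuration:

from langchain_ibm import WatsonxLLM

# Assumed setup: model IDs, url, API key, and project_id are placeholders.
flan_ul2_llm = WatsonxLLM(
    model_id="google/flan-ul2",
    url="https://us-south.ml.cloud.ibm.com",
    apikey="YOUR_API_KEY",
    project_id="YOUR_PROJECT_ID",
)
flan_t5_llm = WatsonxLLM(
    model_id="google/flan-t5-xxl",
    url="https://us-south.ml.cloud.ibm.com",
    apikey="YOUR_API_KEY",
    project_id="YOUR_PROJECT_ID",
)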
I'm getting the following error for the code below:
in validate_chains
for chain in values["chains"]:
KeyError: 'chains'
from langchain_core.prompts import PromptTemplate
from langchain.chains.sequential import SimpleSequentialChain
prompt_1 = PromptTemplate(
    input_variables=["topic"],
    template="Generate a random question about {topic}: Question: "
)
prompt_2 = PromptTemplate(
    input_variables=["question"],
    template="Answer the following question: {question}",
)
llm1 = flan_ul2_llm
prompt_to_flan_ul2 = prompt_1 | llm1
llm2 = flan_t5_llm
flan_to_t5 = prompt_2 | llm2
qa = SimpleSequentialChain(chains=[prompt_to_flan_ul2, flan_to_t5])
qa.invoke('weather')
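For comparison, here is a minimal sketch of how I think the same two steps would look if each prompt/LLM pair were wrapped in an LLMChain, which appears to be the type SimpleSequentialChain expects in its chains list (prompt_1, prompt_2, flan_ul2_llm, and flan_t5_llm are the same objects as above); I haven't confirmed this is the intended usage:

from langchain.chains import LLMChain
from langchain.chains.sequential import SimpleSequentialChain

# Wrap each prompt/LLM pair in an LLMChain instead of piping with "|",
# since SimpleSequentialChain validates that every entry in `chains` is a Chain.
chain_1 = LLMChain(llm=flan_ul2_llm, prompt=prompt_1)
chain_2 = LLMChain(llm=flan_t5_llm, prompt=prompt_2)

qa = SimpleSequentialChain(chains=[chain_1, chain_2])
qa.invoke("weather")

My guess is that prompt_1 | llm1 produces a RunnableSequence rather than a Chain, so the chains field fails validation and validate_chains then raises the KeyError; if that's right, the other option would be to keep the | composition and drop SimpleSequentialChain entirely, invoking the combined runnable directly.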