Can you please help me with this? I'm trying to use Ollama to build a chat that can call tools in order to answer the user's question. I followed the same LangChain documentation as the OpenAI tool-calling example and just swapped OpenAI for Ollama. The error I get is this:

ollama_langchain/node_modules/@langchain/community/dist/experimental/chat_models/ollama_functions.js:97
throw new Error(`Failed to parse a function call from ${this.llm.model} output: ${chatGenerationContent}`);
^
Error: Failed to parse a function call from llama2 output: {
  "tool": "getAge",
  "tool_input": {
    "person": "Alice L."
  },
  "output": 27
}
at OllamaFunctions._generate (file:///C:/Users/YassineES-SADANY/Downloads/ollama_langchain/node_modules/@langchain/community/dist/experimental/chat_models/ollama_functions.js:97:19)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async Promise.allSettled (index 0)
at async OllamaFunctions._generateUncached (file:///C:/Users/YassineES-SADANY/Downloads/ollama_langchain/node_modules/@langchain/core/dist/language_models/chat_models.js:169:29)
at async OllamaFunctions.invoke (file:///C:/Users/YassineES-SADANY/Downloads/ollama_langchain/node_modules/@langchain/core/dist/language_models/chat_models.js:53:24)
at async RunnableSequence.invoke (file:///C:/Users/YassineES-SADANY/Downloads/ollama_langchain/node_modules/@langchain/core/dist/runnables/base.js:1120:33)
at async file:///C:/Users/YassineES-SADANY/Downloads/ollama_langchain/zod_ollama.js:123:5
Node.js v20.12.2

Here is my code:
import { z } from "zod";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { OllamaFunctions } from "@langchain/community/experimental/chat_models/ollama_functions";
import { JsonOutputToolsParser } from "langchain/output_parsers";
import {
  RunnableLambda,
  RunnablePassthrough,
  RunnableSequence,
} from "@langchain/core/runnables";
import { convertToOpenAITool } from "@langchain/core/utils/function_calling";
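// System prompt that lists the available tools and tells the model to reply
// with a JSON object describing the tool call (this version also asks the
// model to fill in an "output" field).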
const toolSystemPromptTemplate = `You are an assistant that has access to the following set of tools. Here are the names and descriptions for each tool:
{tools}
To use a tool, respond with a JSON object with the following structure:
{{
  "tool": <name of the called tool>,
  "tool_input": <parameters for the tool matching the above JSON schema>,
  "output": <output of the tool>
}}`;
const addTool = new DynamicStructuredTool({
  name: "add",
  description: "Add two integers together.",
  schema: z.object({
    firstInt: z.number(),
    secondInt: z.number(),
  }),
  func: async ({ firstInt, secondInt }) => {
    return (firstInt + secondInt).toString();
  },
});
const multiplyTool = new DynamicStructuredTool({
  name: "multiply",
  description: "Multiply two integers together.",
  schema: z.object({
    firstInt: z.number(),
    secondInt: z.number(),
  }),
  func: async ({ firstInt, secondInt }) => {
    return (firstInt * secondInt).toString();
  },
});
const exponentiateTool = new DynamicStructuredTool({
  name: "exponentiate",
  description: "Exponentiate the base to the exponent power.",
  schema: z.object({
    base: z.number(),
    exponent: z.number(),
  }),
  func: async ({ base, exponent }) => {
    return (base ** exponent).toString();
  },
});
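// Lookup tool that the question at the bottom is expected to trigger; it only
// knows the ages of two hard-coded people and returns a sentence as a string.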
const getAgeTool = new DynamicStructuredTool({
  name: "getAge",
  description: "Get the age of a person. The tool accepts a string with the person's name and returns their age.",
  schema: z.object({
    person: z.string(),
  }),
  func: async ({ person }) => {
    switch (person) {
      case "Bob K.":
        return "Bob is 45 years old.";
      case "Alice L.":
        return "Alice is 32 years old.";
      default:
        return "I don't know how old that person is.";
    }
  },
});
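// As far as I understand, OllamaFunctions is the experimental wrapper that
// emulates OpenAI-style function calling on top of a local Ollama model by
// injecting the tool system prompt above.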
const model = new OllamaFunctions({
  model: "llama2",
  toolSystemPromptTemplate,
});
const tools = [getAgeTool, multiplyTool, exponentiateTool, addTool];
const toolMap = {
  multiply: multiplyTool,
  exponentiate: exponentiateTool,
  add: addTool,
  getAge: getAgeTool,
};
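// Bind the tool definitions (converted to the OpenAI tool schema) so the model
// knows which functions it can call.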
const modelWithTools = model.bind({
  functions: tools.map(convertToOpenAITool),
});
// Function for dynamically constructing the end of the chain based on the model-selected tool.
const callSelectedTool = RunnableLambda.from(
  (toolInvocation) => {
    const selectedTool = toolMap[toolInvocation.type];
    if (!selectedTool) {
      throw new Error(
        `No matching tool available for requested type "${toolInvocation.type}".`
      );
    }
    const toolCallChain = RunnableSequence.from([
      (toolInvocation) => toolInvocation.args,
      selectedTool,
    ]);
    // The docs example wraps this in `RunnablePassthrough.assign` to also return
    // the intermediate `toolInvocation` params; here I just return the tool call
    // chain, so only the answer comes back.
    return toolCallChain;
  }
);
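// Full chain: the model picks a tool, JsonOutputToolsParser extracts the tool
// calls, and each call is routed to the matching tool above.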
const chain = RunnableSequence.from([
  modelWithTools,
  new JsonOutputToolsParser(),
  // .map() allows us to apply a function for each item in a list of inputs.
  // Required because the model can call multiple tools at once.
  callSelectedTool.map(),
]);
await chain.invoke("How old is Alice L.?");