When I executed the below code, it is resulting in
raise HttpResponseError(response=response, model=error)
azure.core.exceptions.HttpResponseError: (InvalidRequestParameter) Value must be between 0 and 100000.
Code: InvalidRequestParameter
Message: Value must be between 0 and 100000.
Parameter name: $skip
Exception Details: (ParameterValueOutOfRange) Value must be between 0 and 100000.
Code: ParameterValueOutOfRange
Message: Value must be between 0 and 100000.
Number of documents in my knowledge base is just 14000 but I don’t understand why it is exceeding 100,000.
Code snippet is shown below:
import asyncio
import os
import semantic_kernel as sk
from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion,AzureChatCompletion
from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient
from azure.search.documents.models import QueryType
from services import Service
# Initialize the Semantic Kernel
kernel = sk.Kernel()
# Initialize Azure Cognitive Search client
search_client = SearchClient(
endpoint=AZURE_SEARCH_ENDPOINT,
index_name=AZURE_SEARCH_INDEX_NAME,
credential=AzureKeyCredential(AZURE_SEARCH_API_KEY)
)
class SearchPlugin:
def search_documents(self, query: str) -> str:
search_parameters = {
"top": 50, # Number of results to return
"skip": 0 # Number of results to skip
}
results = search_client.search(search_text=query, query_type=QueryType.SIMPLE)
#, **search_parameters)
print("Number of documents = ", len(list(results)))
response = [doc['content'] for doc in results]
return 'n'.join(response)
# Manually register the plugins with the kernel
plugins = {"SearchPlugin": SearchPlugin()}
# Define a function to interact with the AI agent
def interact_with_agent(plugins, plugin_name, function_name, **kwargs):
plugin = plugins.get(plugin_name)
if not plugin:
raise ValueError(f"Plugin {plugin_name} not found.")
func = getattr(plugin, function_name, None)
if not func:
raise ValueError(f"Function {function_name} not found in plugin {plugin_name}.")
return func(**kwargs)
# System prompt for the chat completion
prompt = "You are an AI language model assistant."
from semantic_kernel.connectors.ai.open_ai import OpenAIChatPromptExecutionSettings
from semantic_kernel.prompt_template import InputVariable, PromptTemplateConfig
selectedService = Service.AzureOpenAI
service_id="gpt-4o"
chat_completion = AzureChatCompletion(service_id=service_id,
endpoint=AZURE_OPENAI_ENDPOINT,
api_key=AZURE_OPENAI_API_KEY,
deployment_name=AZURE_OPENAI_DEPLOYMENT_NAME
)
kernel.add_service(chat_completion)
execution_settings = kernel.get_prompt_execution_settings_from_service_id(
service_id=service_id
)
execution_settings.max_tokens = 2000
execution_settings.temperature = 0.7
execution_settings.top_p = 0.8
prompt_template_config = PromptTemplateConfig(
template=prompt,
name="PMI Chatbot",
template_format="semantic-kernel",
input_variables=[
InputVariable(name="input", description="The user input", is_required=True),
],
execution_settings=execution_settings,
)
Respnose_Func = kernel.add_function(
function_name=interact_with_agent(plugins, "SearchPlugin", "search_documents", query=service_query),
plugin_name="SearchPlugin",
prompt_template_config=prompt_template_config,
)
# Run your prompt
# Note: functions are run asynchronously
async def main():
input_text = "What is Project Management"
result = await kernel.invoke(Respnose_Func, input=input_text)
print(result)
if __name__ == "__main__":
asyncio.run(main())
Can someone please help me to make the above code executable? TIA
I tried surfing internet but I didn’t get the solution. I am expecting this code to run without error.