I am using a SemaphoreSlim to handle one request at a time: when a request can't acquire the lock, I enqueue it so it can be processed later in sequence. But when I run the code, only the first request is handled and the other requests are dropped. What am I doing wrong here? In the action method I am calling a service.
using Microsoft.AspNetCore.Http;
using System.Collections.Concurrent;

public class OneRequestAtATimeMiddleware
{
    private readonly RequestDelegate _next;
    private readonly SemaphoreSlim _lock = new SemaphoreSlim(1, 1);
    private readonly ConcurrentQueue<HttpContext> _queue = new ConcurrentQueue<HttpContext>();

    public OneRequestAtATimeMiddleware(RequestDelegate next)
    {
        _next = next;
    }

    public async Task InvokeAsync(HttpContext context)
    {
        var endpoint = context.GetEndpoint();
        if (endpoint?.Metadata?.GetMetadata<OneRequestAtATime>() != null)
        {
            if (!await _lock.WaitAsync(TimeSpan.Zero))
            {
                _queue.Enqueue(context);
                return;
            }
            try
            {
                await _next(context);
                while (_queue.TryDequeue(out var nextContext))
                {
                    await _next(nextContext);
                }
            }
            finally
            {
                _lock.Release();
                if (_queue.TryPeek(out var nextContext))
                {
                    await InvokeAsync(nextContext);
                }
            }
        }
        else
        {
            await _next(context);
        }
    }
}

[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
public class OneRequestAtATime : Attribute { }
ChatGPT helped me to resolve the issue. The problem in the original code is that when the lock isn't available, the middleware enqueues the HttpContext and returns from InvokeAsync, which ends that request's pipeline with an empty response; an HttpContext can't be replayed through _next later, so those requests appear dropped. The fix is to simply await the semaphore without a timeout: SemaphoreSlim itself queues the waiting requests, and each one runs when its turn comes.
using Microsoft.AspNetCore.Http;

public class OneRequestAtATimeMiddleware
{
    private readonly RequestDelegate _next;
    private static readonly SemaphoreSlim _lock = new SemaphoreSlim(1, 1); // Static to ensure one lock for all requests.

    public OneRequestAtATimeMiddleware(RequestDelegate next)
    {
        _next = next;
    }

    public async Task InvokeAsync(HttpContext context)
    {
        var endpoint = context.GetEndpoint();
        if (endpoint?.Metadata?.GetMetadata<OneRequestAtATime>() != null)
        {
            await _lock.WaitAsync();
            try
            {
                await _next(context);
            }
            finally
            {
                _lock.Release();
            }
        }
        else
        {
            await _next(context);
        }
    }
}

[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
public class OneRequestAtATime : Attribute { }
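
For completeness, this is roughly how I wire it up. The middleware has to run after routing so context.GetEndpoint() returns the matched endpoint and can see the attribute. The controller and action names below are just placeholders, not part of the actual project:

// Program.cs (.NET 6+ minimal hosting)
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddControllers();

var app = builder.Build();

app.UseRouting();                                    // populate GetEndpoint()
app.UseMiddleware<OneRequestAtATimeMiddleware>();    // after routing, before the endpoint runs
app.MapControllers();

app.Run();

// Example controller: only the attributed action is serialized, other actions run normally.
[ApiController]
[Route("api/[controller]")]
public class ReportsController : ControllerBase      // hypothetical controller for illustration
{
    [HttpPost("generate")]
    [OneRequestAtATime]
    public async Task<IActionResult> Generate()
    {
        await Task.Delay(1000);                      // stand-in for the service call that must not run concurrently
        return Ok("done");
    }
}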