It works fine with up to 21 files and finishes in a couple of seconds. But if I feed it more than 21, it hangs indefinitely and hits my 30-second timeout. From what I've read, I'm nowhere near the S3 concurrent request limit (which should be in the thousands), so I'm really not sure what's happening.
Here's the relevant part of the code. It hangs specifically on the `await s3_client.copy(...)` line:
```python
import asyncio

import aioboto3

# `variables` is our app config module (not shown).


async def _copy_image(
    s3_client, source_key: str, destination_key: str, bucket_name: str
) -> None:
    # Copy a single object to a new key within the same bucket.
    copy_source = {
        "Bucket": bucket_name,
        "Key": source_key,
    }
    await s3_client.copy(
        CopySource=copy_source,
        Bucket=bucket_name,
        Key=destination_key,
    )


async def copy_images_for_new_guide(
    keys: list[str],
    new_guide_id: int,
    org_id: int,
) -> None:
    session = aioboto3.Session()
    async with session.client(
        "s3",
        region_name=variables.secrets.AWS_REGION_NAME,
    ) as s3_client:
        # Schedule one copy per key, then run them all concurrently.
        tasks = []
        for key in keys:
            new_key = (
                f"orgs/org_{org_id}/guide_{new_guide_id}/{key.split('/')[-1]}"
            )
            tasks.append(
                _copy_image(
                    s3_client, key, new_key, variables.settings.IMAGES_BUCKET
                )
            )
        await asyncio.gather(*tasks)
```
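In case it helps narrow things down: one workaround I'm considering is capping how many copies are in flight at once. Here's a minimal sketch (untested; `_copy_image_bounded` and the limit of 10 are illustrative guesses, not tuned values):

```python
import asyncio

# Arbitrary cap for illustration, not a tuned value.
MAX_CONCURRENT_COPIES = 10


async def _copy_image_bounded(
    semaphore: asyncio.Semaphore,
    s3_client,
    source_key: str,
    destination_key: str,
    bucket_name: str,
) -> None:
    # At most MAX_CONCURRENT_COPIES copies hold the semaphore at a time;
    # the remaining tasks queue up here instead of all hitting S3 at once.
    async with semaphore:
        await s3_client.copy(
            CopySource={"Bucket": bucket_name, "Key": source_key},
            Bucket=bucket_name,
            Key=destination_key,
        )
```

The caller would create `semaphore = asyncio.Semaphore(MAX_CONCURRENT_COPIES)` once in `copy_images_for_new_guide` and pass it to every task. But even if that masks the problem, I'd rather understand the root cause.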
What could be causing this behavior?
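One detail I noticed while reading around, in case it's relevant: `client.copy` is the managed-transfer version of copy (unlike the single-request `copy_object`), so I believe a single call can issue several requests, and botocore's default connection pool is only 10 connections per client (`max_pool_connections`). If pool exhaustion is the issue, I assume the client could be configured with a larger pool, something like this (untested sketch; `copy_with_bigger_pool` and the value 50 are purely illustrative):

```python
import aioboto3
from aiobotocore.config import AioConfig

# botocore defaults to max_pool_connections=10 per client;
# 50 here is purely illustrative, not a recommendation.
s3_config = AioConfig(max_pool_connections=50)


async def copy_with_bigger_pool(region_name: str) -> None:
    session = aioboto3.Session()
    async with session.client(
        "s3", region_name=region_name, config=s3_config
    ) as s3_client:
        ...  # same copy logic as above
```

Is that the right direction, or is something else going on?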