feat: enhance cache flush API for remote-specific S3 clearing

Add support for remote-specific S3 object deletion using hierarchical key prefixes.
When a remote parameter is provided, only objects under that remote's hierarchy
(e.g., 'fedora/*') are deleted, preserving cached files for other remotes.

Key improvements:
- Use S3 list_objects_v2 Prefix parameter for targeted deletion
- Enhanced logging to indicate scope of cache clearing operations
- Backward compatible: existing behavior is preserved when no remote is specified

Resolves artifactapi-u46
This commit is contained in:
Ben Vincent 2026-01-25 11:08:46 +11:00
parent de04e4d2b2
commit 3e8e819ecf

View File

@ -116,7 +116,12 @@ def flush_cache(
# Flush S3 objects if requested
if cache_type in ["all", "files"]:
try:
response = storage.client.list_objects_v2(Bucket=storage.bucket)
# Use prefix filtering for remote-specific deletion
list_params = {"Bucket": storage.bucket}
if remote:
list_params["Prefix"] = f"{remote}/"
response = storage.client.list_objects_v2(**list_params)
if 'Contents' in response:
objects_to_delete = [obj['Key'] for obj in response['Contents']]
@ -128,8 +133,9 @@ def flush_cache(
logger.warning(f"Failed to delete S3 object {key}: {e}")
if objects_to_delete:
result["flushed"]["operations"].append(f"Deleted {len(objects_to_delete)} S3 objects")
logger.info(f"Cache flush: Deleted {len(objects_to_delete)} S3 objects")
scope = f" for remote '{remote}'" if remote else ""
result["flushed"]["operations"].append(f"Deleted {len(objects_to_delete)} S3 objects{scope}")
logger.info(f"Cache flush: Deleted {len(objects_to_delete)} S3 objects{scope}")
except Exception as e:
result["flushed"]["operations"].append(f"S3 flush failed: {str(e)}")