Compare commits


4 Commits

Author SHA1 Message Date
5733d52e51 Merge pull request 'feature/cache-flush-api-enhancement' (#7) from feature/cache-flush-api-enhancement into master
Reviewed-on: #7
2026-01-25 11:34:43 +11:00
bf8a176dda Bump version: 2.0.4 → 2.0.5 2026-01-25 11:09:29 +11:00
3e8e819ecf feat: enhance cache flush API for remote-specific S3 clearing
Add support for remote-specific S3 object deletion using hierarchical key prefixes.
When a remote parameter is provided, only objects under that remote's hierarchy
(e.g., 'fedora/*') are deleted, preserving cached files for other remotes.

Key improvements:
- Use S3 list_objects_v2 Prefix parameter for targeted deletion
- Enhanced logging to indicate scope of cache clearing operations
- Backward compatible - existing behavior preserved when no remote specified

Resolves artifactapi-u46
2026-01-25 11:08:46 +11:00
de04e4d2b2 Merge pull request 'benvin/path-based-storage' (#6) from benvin/path-based-storage into master
Reviewed-on: #6
2026-01-25 00:00:53 +11:00
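The remote-specific clearing described in commit 3e8e819ecf can be sketched as a standalone example along the following lines. This is illustrative only: it assumes a boto3 S3 client and objects keyed as '<remote>/<path>' as in the diff further down, and the function name clear_cached_objects and the bucket name in the usage comments are hypothetical, not part of the project.

import boto3

def clear_cached_objects(bucket: str, remote: str | None = None) -> int:
    """Delete cached S3 objects, optionally limited to one remote's key prefix."""
    client = boto3.client("s3")
    list_params = {"Bucket": bucket}
    if remote:
        # Keys are stored hierarchically (e.g. 'fedora/releases/40/...'),
        # so a 'fedora/' prefix restricts the listing to that remote only.
        list_params["Prefix"] = f"{remote}/"
    response = client.list_objects_v2(**list_params)
    keys = [obj["Key"] for obj in response.get("Contents", [])]
    for key in keys:
        client.delete_object(Bucket=bucket, Key=key)
    return len(keys)

# clear_cached_objects("artifact-cache", remote="fedora")  # clears only 'fedora/*'
# clear_cached_objects("artifact-cache")                   # clears every cached object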
2 changed files with 11 additions and 5 deletions

pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "artifactapi"
-version = "2.0.4"
+version = "2.0.5"
 description = "Generic artifact caching system with support for various package managers"
 dependencies = [
@@ -44,7 +44,7 @@ dev = [
 ]
 [tool.bumpversion]
-current_version = "2.0.4"
+current_version = "2.0.5"
 commit = true
 tag = true
 message = "Bump version: {current_version} → {new_version}"
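The [tool.bumpversion] table in the hunk above is the configuration read by the bump-my-version tool; a patch-level bump such as 2.0.4 → 2.0.5 is typically produced by running the tool's bump command for the patch part, and with commit = true and tag = true it also creates the "Bump version: 2.0.4 → 2.0.5" commit and tag that appear in the commit list above.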

View File

@@ -116,7 +116,12 @@ def flush_cache(
     # Flush S3 objects if requested
     if cache_type in ["all", "files"]:
         try:
-            response = storage.client.list_objects_v2(Bucket=storage.bucket)
+            # Use prefix filtering for remote-specific deletion
+            list_params = {"Bucket": storage.bucket}
+            if remote:
+                list_params["Prefix"] = f"{remote}/"
+            response = storage.client.list_objects_v2(**list_params)
             if 'Contents' in response:
                 objects_to_delete = [obj['Key'] for obj in response['Contents']]
@@ -128,8 +133,9 @@
                         logger.warning(f"Failed to delete S3 object {key}: {e}")
                 if objects_to_delete:
-                    result["flushed"]["operations"].append(f"Deleted {len(objects_to_delete)} S3 objects")
-                    logger.info(f"Cache flush: Deleted {len(objects_to_delete)} S3 objects")
+                    scope = f" for remote '{remote}'" if remote else ""
+                    result["flushed"]["operations"].append(f"Deleted {len(objects_to_delete)} S3 objects{scope}")
+                    logger.info(f"Cache flush: Deleted {len(objects_to_delete)} S3 objects{scope}")
         except Exception as e:
             result["flushed"]["operations"].append(f"S3 flush failed: {str(e)}")