Compare commits

..

7 Commits

Author SHA1 Message Date
e4013e6a2a Merge pull request 'feat: index caching' (#4) from benvin/index_caching into master
Reviewed-on: #4
2026-01-13 18:14:39 +11:00
9defc78e21 feat: index caching
- improve index detection for rpms
- improve logging
2026-01-13 18:13:47 +11:00
f40675f3d2 Merge pull request 'feat: add fedora index files' (#3) from benvin/fedora_indexes into master
Reviewed-on: #3
2026-01-10 17:02:58 +11:00
b54e6c3e0c feat: add fedora index files
- ensure files matching xml.zck and xml.zst are marked as index files
2026-01-10 17:01:39 +11:00
79a8553e9c Merge pull request 'Fix S3 SSL certificate validation and boto3 checksum compatibility' (#2) from benvin/boto3_fixes into master
Reviewed-on: #2
2026-01-08 23:55:42 +11:00
b7205e09a3 Fix S3 SSL certificate validation and boto3 checksum compatibility
- Add support for custom CA bundle via REQUESTS_CA_BUNDLE/SSL_CERT_FILE environment variables
- Configure boto3 client with custom SSL verification to support Ceph RadosGW through nginx proxy
- Maintain boto3 checksum validation configuration for compatibility with third-party S3 providers
- Resolves XAmzContentSHA256Mismatch errors when connecting to RadosGW endpoints

Fixes #4400: compatibility issue with boto3 v1.36+'s stricter checksum validation
2026-01-08 23:54:39 +11:00
1fb6b89a5f Merge pull request 'Fix boto3 XAmzContentSHA256Mismatch errors with Ceph RadosGW' (#1) from fix/boto3-checksum-validation into master
Reviewed-on: #1
2026-01-08 23:07:51 +11:00
5 changed files with 49 additions and 15 deletions

View File

@@ -37,6 +37,7 @@ RUN uv sync --frozen
# Copy application source
COPY --chown=appuser:appuser src/ ./src/
COPY --chown=appuser:appuser remotes.yaml ./
COPY --chown=appuser:appuser ca-bundle.pem ./
# Expose port
EXPOSE 8000

View File

@@ -1,6 +1,6 @@
[project]
name = "artifactapi"
version = "2.0.0"
version = "2.0.1"
description = "Generic artifact caching system with support for various package managers"
dependencies = [

View File

@@ -23,8 +23,13 @@ class RedisCache:
file_path.endswith("APKINDEX.tar.gz")
or file_path.endswith("Packages.gz")
or file_path.endswith("repomd.xml")
or "repodata/" in file_path
and file_path.endswith((".xml", ".xml.gz", ".xml.bz2", ".xml.xz"))
or ("repodata/" in file_path
and file_path.endswith((
".xml", ".xml.gz", ".xml.bz2", ".xml.xz", ".xml.zck", ".xml.zst",
".sqlite", ".sqlite.gz", ".sqlite.bz2", ".sqlite.xz", ".sqlite.zck", ".sqlite.zst",
".yaml.xz", ".yaml.gz", ".yaml.bz2", ".yaml.zst",
".asc", ".txt"
)))
)
def get_index_cache_key(self, remote_name: str, path: str) -> str:

View File

@@ -1,6 +1,7 @@
import os
import re
import hashlib
import logging
from typing import Dict, Any, Optional
import httpx
from fastapi import FastAPI, HTTPException, Response, Query, File, UploadFile
@@ -20,7 +21,14 @@ class ArtifactRequest(BaseModel):
include_pattern: str
app = FastAPI(title="Artifact Storage API", version="2.0.0")
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
app = FastAPI(title="Artifact Storage API", version="2.0.1")
# Initialize components using config
config_path = os.environ.get("CONFIG_PATH")
@@ -45,7 +53,7 @@ def read_root():
config._check_reload()
return {
"message": "Artifact Storage API",
"version": "2.0.0",
"version": app.version,
"remotes": list(config.config.get("remotes", {}).keys()),
}
@@ -105,6 +113,7 @@ async def cache_single_artifact(url: str, remote_name: str, path: str) -> dict:
key = storage.get_object_key_from_path(remote_name, path)
if storage.exists(key):
logger.info(f"Cache ALREADY EXISTS: {url} (key: {key})")
return {
"url": url,
"cached_url": storage.get_url(key),
@@ -118,6 +127,8 @@ async def cache_single_artifact(url: str, remote_name: str, path: str) -> dict:
storage_path = storage.upload(key, response.content)
logger.info(f"Cache ADD SUCCESS: {url} (size: {len(response.content)} bytes, key: {key})")
return {
"url": url,
"cached_url": storage.get_url(key),
@@ -173,6 +184,7 @@ async def get_artifact(remote_name: str, path: str):
# Check if artifact matches configured patterns
if not await check_artifact_patterns(remote_name, repo_path, file_path, path):
logger.info(f"PATTERN BLOCKED: {remote_name}/{path} - not matching include patterns")
raise HTTPException(
status_code=403, detail="Artifact not allowed by configuration patterns"
)
@@ -198,6 +210,7 @@ async def get_artifact(remote_name: str, path: str):
# Index file exists, but check if it's still valid
if not cache.is_index_valid(remote_name, path):
# Index has expired, remove it from S3
logger.info(f"Index EXPIRED: {remote_name}/{path} - removing from cache")
cache.cleanup_expired_index(storage, remote_name, path)
cached_key = None # Force re-download
@@ -207,6 +220,9 @@ async def get_artifact(remote_name: str, path: str):
artifact_data = storage.download_object(cached_key)
filename = os.path.basename(path)
# Log cache hit
logger.info(f"Cache HIT: {remote_name}/{path} (size: {len(artifact_data)} bytes, key: {cached_key})")
# Determine content type based on file extension
content_type = "application/octet-stream"
if filename.endswith(".tar.gz"):
@@ -245,9 +261,11 @@ async def get_artifact(remote_name: str, path: str):
)
# Artifact not cached, cache it first
logger.info(f"Cache MISS: {remote_name}/{path} - fetching from remote: {remote_url}")
result = await cache_single_artifact(remote_url, remote_name, path)
if result["status"] == "error":
logger.error(f"Cache ADD FAILED: {remote_name}/{path} - {result['error']}")
raise HTTPException(
status_code=502, detail=f"Failed to fetch artifact: {result['error']}"
)
@@ -258,6 +276,7 @@ async def get_artifact(remote_name: str, path: str):
cache_config = config.get_cache_config(remote_name)
index_ttl = cache_config.get("index_ttl", 300) # Default 5 minutes
cache.mark_index_cached(remote_name, path, index_ttl)
logger.info(f"Index file cached with TTL: {remote_name}/{path} (ttl: {index_ttl}s)")
# Now return the cached artifact
try:

View File

@@ -22,16 +22,25 @@ class S3Storage:
self.bucket = bucket
self.secure = secure
self.client = boto3.client(
"s3",
endpoint_url=f"http{'s' if self.secure else ''}://{self.endpoint}",
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key,
config=Config(
request_checksum_calculation="when_required",
response_checksum_validation="when_required"
)
)
ca_bundle = os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('SSL_CERT_FILE')
config_kwargs = {
"request_checksum_calculation": "when_required",
"response_checksum_validation": "when_required"
}
client_kwargs = {
"endpoint_url": f"http{'s' if self.secure else ''}://{self.endpoint}",
"aws_access_key_id": self.access_key,
"aws_secret_access_key": self.secret_key,
"config": Config(**config_kwargs)
}
if ca_bundle and os.path.exists(ca_bundle):
client_kwargs["verify"] = ca_bundle
print(f"Debug: Using CA bundle: {ca_bundle}")
else:
print(f"Debug: No CA bundle found. REQUESTS_CA_BUNDLE={os.environ.get('REQUESTS_CA_BUNDLE')}, SSL_CERT_FILE={os.environ.get('SSL_CERT_FILE')}")
self.client = boto3.client("s3", **client_kwargs)
# Try to ensure bucket exists, but don't fail if MinIO isn't ready yet
try: