feat: add GitHub integration for automated package updates
- add GitHub API functions to fetch latest releases and compare versions
- implement package update checking against GitHub repositories
- add upgrade and upgrade-all CLI commands with dry-run support
- include .ruff_cache in .gitignore for cleaner repository
parent c6d8e33a54
commit 2954835dd0
.gitignore (vendored): 1 changed line
@@ -1,3 +1,4 @@
dist
env
.claude
+.ruff_cache
tools/build: 344 changed lines
@@ -222,6 +222,128 @@ def get_gitea_token() -> str:
    return gitea_token


# ==================== GITHUB API FUNCTIONS ====================

def get_github_latest_release(repo: str) -> Optional[dict]:
    """
    Get the latest release from GitHub API.

    Args:
        repo: GitHub repository in format "owner/repo"

    Returns:
        Latest release info or None if not found
    """
    logger = logging.getLogger(__name__)

    try:
        github_token = get_github_token()

        url = f"https://api.github.com/repos/{repo}/releases/latest"
        headers = {
            'Authorization': f'token {github_token}',
            'Accept': 'application/vnd.github.v3+json'
        }

        logger.debug(f"Checking GitHub releases: {url}")
        response = requests.get(url, headers=headers, timeout=30)

        if response.status_code == 200:
            release = response.json()
            logger.debug(f"Latest release for {repo}: {release.get('tag_name', 'unknown')}")
            return release
        elif response.status_code == 404:
            logger.warning(f"No releases found for {repo}")
            return None
        elif response.status_code == 401:
            logger.error("GitHub authentication failed. Check GitHub token.")
            return None
        else:
            logger.warning(
                f"Unexpected response from GitHub API for {repo}: "
                f"{response.status_code} - {response.text}"
            )
            return None

    except requests.RequestException as e:
        logger.error(f"Failed to check GitHub releases for {repo}: {e}")
        return None


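# Illustration (not part of the commit): the same endpoint can also be queried
# without a token for public repositories, at GitHub's much lower unauthenticated
# rate limit; OWNER/REPO is a placeholder.
#
#   resp = requests.get(
#       "https://api.github.com/repos/OWNER/REPO/releases/latest",
#       headers={"Accept": "application/vnd.github.v3+json"},
#       timeout=30,
#   )
#   tag_name = resp.json().get("tag_name") if resp.status_code == 200 else None
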
def normalize_github_version(version: str) -> str:
    """
    Normalize version string by removing 'v' prefix if present.

    Args:
        version: Version string (e.g., "v1.2.3" or "1.2.3")

    Returns:
        Normalized version string (e.g., "1.2.3")
    """
    if version.startswith('v'):
        return version[1:]
    return version


def compare_versions(current: str, latest: str) -> bool:
    """
    Compare version strings to determine if latest is newer.

    Args:
        current: Current version string
        latest: Latest version string

    Returns:
        True if latest is newer than current
    """
    def version_tuple(v):
        parts = []
        for part in v.split('.'):
            try:
                parts.append(int(part))
            except ValueError:
                parts.append(part)
        return tuple(parts)

    try:
        return version_tuple(latest) > version_tuple(current)
    except (ValueError, TypeError):
        return latest != current


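# Illustration (not part of the commit): expected behaviour of the two helpers above.
#   normalize_github_version("v1.2.3")      -> "1.2.3"
#   compare_versions("1.9.0", "1.10.0")     -> True   (numeric, element-wise tuple compare)
#   compare_versions("1.2.3", "1.2.3")      -> False
#   compare_versions("1.2.3", "1.2.rc1")    -> True   (mixed int/str parts raise TypeError,
#                                                      so it falls back to plain inequality)
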
def get_github_token() -> str:
    """
    Retrieve GitHub API token from Vault.

    Returns:
        GitHub API token

    Raises:
        Exception if Vault authentication fails or token cannot be retrieved
    """
    logger = logging.getLogger(__name__)

    client = get_vault_client()

    try:
        github_secret = client.secrets.kv.v2.read_secret_version(
            raise_on_deleted_version=True,
            mount_point='kv',
            path='service/github/neoloc/tokens/read-only-token'
        )
        github_token = github_secret['data']['data']['token']
        logger.debug("Successfully retrieved GitHub token from Vault")
    except Exception as e:
        logger.error(f"Failed to retrieve GitHub token from Vault: {e}")
        sys.exit(1)

    if not github_token:
        logger.error("GitHub token is empty")
        sys.exit(1)

    return github_token


# ==================== GITEA API FUNCTIONS ====================

def normalize_version(version: str) -> str:
@@ -457,7 +579,7 @@ def build_package_docker(
    if dry_run:
        logger.info(f"[DRY RUN] Would build Docker image: {image_name}")
        logger.info(f"[DRY RUN] Would use base image: {base_image}")
-       logger.info(f"[DRY RUN] Would pass build arguments:")
+       logger.info("[DRY RUN] Would pass build arguments:")
        logger.info(f"[DRY RUN] PACKAGE_NAME={package_name}")
        logger.info(f"[DRY RUN] PACKAGE_VERSION={package_version}")
        logger.info(f"[DRY RUN] PACKAGE_RELEASE={package_release}")
@@ -891,6 +1013,178 @@ class Builder:
        self.dist_dir.mkdir()
        self.logger.info("Cleaned dist directory")

    def check_package_updates(self, package_name: str, dry_run: bool = False) -> bool:
        """
        Check for updates for a single package from GitHub releases.

        Args:
            package_name: Name of the package to check
            dry_run: If True, only show what would be done

        Returns:
            True if package was updated or no update needed, False if error
        """
        package_dir = self.rpms_dir / package_name
        metadata_file = package_dir / "metadata.yaml"

        if not metadata_file.exists():
            self.logger.warning(f"No metadata.yaml found for {package_name}")
            return False

        try:
            with open(metadata_file, 'r') as f:
                metadata_data = yaml.safe_load(f)

            # Convert to dataclass for easier handling
            builds = [Build(**build) for build in metadata_data.get('builds', [])]
            package_metadata = PackageMetadata(
                name=metadata_data.get('name', package_name),
                github=metadata_data.get('github', ''),
                description=metadata_data.get('description', ''),
                arch=metadata_data.get('arch', 'amd64'),
                platform=metadata_data.get('platform', 'linux'),
                maintainer=metadata_data.get('maintainer', ''),
                homepage=metadata_data.get('homepage', ''),
                license=metadata_data.get('license', ''),
                builds=builds
            )

            if not package_metadata.github:
                self.logger.debug(f"Package {package_name} has no GitHub repo configured")
                return True

            self.logger.info(f"Checking {package_name} from {package_metadata.github}")

            # Get latest release from GitHub
            latest_release = get_github_latest_release(package_metadata.github)
            if not latest_release:
                return False

            latest_version = normalize_github_version(latest_release.get('tag_name', ''))
            if not latest_version:
                self.logger.warning(f"Could not determine latest version for {package_name}")
                return False

            # Check if any build needs updating
            updated = False
            for i, build in enumerate(package_metadata.builds):
                if compare_versions(build.version, latest_version):
                    # Determine distro suffix based on repository configuration
                    distro_suffix = self._get_distro_suffix(build.repository)
                    new_release = f"1-{distro_suffix}" if distro_suffix else "1"

                    self.logger.info(f"New version available for {package_name}: {build.version} -> {latest_version}")
                    if not dry_run:
                        package_metadata.builds[i].version = latest_version
                        package_metadata.builds[i].release = new_release
                        updated = True
                    else:
                        self.logger.info(f"[DRY RUN] Would update {package_name} to {latest_version} with release {new_release}")
                        updated = True

            if updated and not dry_run:
                # Convert back to dict and save
                updated_data = {
                    'name': package_metadata.name,
                    'github': package_metadata.github,
                    'description': package_metadata.description,
                    'arch': package_metadata.arch,
                    'platform': package_metadata.platform,
                    'maintainer': package_metadata.maintainer,
                    'homepage': package_metadata.homepage,
                    'license': package_metadata.license,
                    'builds': [
                        {
                            'repository': build.repository,
                            'image': build.image,
                            'release': build.release,
                            'version': build.version
                        }
                        for build in package_metadata.builds
                    ]
                }

                with open(metadata_file, 'w') as f:
                    yaml.dump(updated_data, f, default_flow_style=False, sort_keys=False)

                self.logger.info(f"Successfully updated {package_name} to version {latest_version}")
            elif not updated:
                self.logger.info(f"Package {package_name} is up to date")

            return True

        except Exception as e:
            self.logger.error(f"Failed to check package {package_name}: {e}")
            return False

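    # Illustration (not part of the commit): the metadata.yaml shape this method
    # reads and rewrites; all values below are hypothetical.
    #
    #   name: example-tool
    #   github: example-org/example-tool
    #   description: Example CLI tool
    #   arch: amd64
    #   platform: linux
    #   maintainer: packaging@example.com
    #   homepage: https://example.com/example-tool
    #   license: MIT
    #   builds:
    #     - repository:
    #         - almalinux/el9
    #       image: almalinux:9
    #       release: 1-el9
    #       version: 1.2.3
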
    def upgrade_all(self, dry_run: bool = False) -> bool:
        """
        Check for updates for all packages with GitHub repos configured.

        Args:
            dry_run: If True, only show what would be done

        Returns:
            True if all packages were processed successfully
        """
        if not self.rpms_dir.exists():
            self.logger.error(f"RPMs directory not found: {self.rpms_dir}")
            return False

        github_packages = []

        # Find packages with GitHub repos
        for package_dir in self.rpms_dir.iterdir():
            if not package_dir.is_dir() or package_dir.name.startswith('.'):
                continue

            metadata_file = package_dir / "metadata.yaml"
            if not metadata_file.exists():
                continue

            try:
                with open(metadata_file, 'r') as f:
                    metadata = yaml.safe_load(f)

                if metadata.get('github'):
                    github_packages.append(package_dir.name)
            except Exception:
                continue

        if not github_packages:
            self.logger.info("No packages with GitHub repos found")
            return True

        self.logger.info(f"Found {len(github_packages)} packages with GitHub repos")

        success_count = 0
        for package_name in github_packages:
            if self.check_package_updates(package_name, dry_run):
                success_count += 1

        self.logger.info(f"Successfully processed {success_count}/{len(github_packages)} packages")
        return success_count == len(github_packages)

    def _get_distro_suffix(self, repositories: List[str]) -> str:
        """
        Determine the distro suffix based on repository configuration.

        Args:
            repositories: List of repositories (e.g., ['almalinux/el9', 'fedora/f33'])

        Returns:
            Distro suffix string (e.g., 'el9', 'f33') or empty string if not determinable
        """
        if not repositories:
            return ""

        # Use the first repository and extract the part after the '/'
        repo = repositories[0]
        if '/' in repo:
            return repo.split('/', 1)[1]

        return ""
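    # Illustration (not part of the commit), using the docstring's example inputs:
    #   _get_distro_suffix(['almalinux/el9', 'fedora/f33'])  -> 'el9'  => release "1-el9"
    #   _get_distro_suffix([])                               -> ''     => release "1"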


# ==================== TYPER APPLICATION ====================
@@ -1122,5 +1416,53 @@ def verify(
    typer.echo()
    typer.echo("🎉 All metadata.yaml files are valid!")

@app.command()
def upgrade(
    package_name: Optional[str] = typer.Argument(None, help="Package name to upgrade (optional, upgrades all if not provided)"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be done without making changes"),
    verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable verbose logging")
):
    """Check for package upgrades from GitHub releases and update metadata.yaml files."""
    setup_logging(verbose)

    try:
        root_dir = Path(__file__).parent.parent
        builder = Builder(root_dir)

        if package_name:
            # Upgrade single package
            success = builder.check_package_updates(package_name, dry_run)
            if not success:
                raise typer.Exit(1)
        else:
            # Upgrade all packages
            success = builder.upgrade_all(dry_run)
            if not success:
                raise typer.Exit(1)

    except KeyboardInterrupt:
        typer.echo("Upgrade check interrupted by user")
        raise typer.Exit(130)

@app.command("upgrade-all")
def upgrade_all(
    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be done without making changes"),
    verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable verbose logging")
):
    """Check for upgrades for all packages with GitHub repos configured."""
    setup_logging(verbose)

    try:
        root_dir = Path(__file__).parent.parent
        builder = Builder(root_dir)

        success = builder.upgrade_all(dry_run)
        if not success:
            raise typer.Exit(1)

    except KeyboardInterrupt:
        typer.echo("Upgrade check interrupted by user")
        raise typer.Exit(130)

if __name__ == '__main__':
    app()
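
Invocation sketch (not part of the diff; assumes tools/build is executable, otherwise run it via python, and the package name is a placeholder):

    tools/build upgrade --dry-run          # report available updates for every package with a github: field
    tools/build upgrade some-package       # bump a single package's metadata.yaml
    tools/build upgrade-all --dry-run      # explicit all-packages variant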