diff --git a/.github/workflows/build_debian.yml b/.github/workflows/build_debian.yml index 49a5345..3ac963c 100644 --- a/.github/workflows/build_debian.yml +++ b/.github/workflows/build_debian.yml @@ -69,7 +69,7 @@ jobs: env: LP_CREDENTIALS_FILE: /tmp/lp-creds.txt run: | - python3 scripts/copy_to_other_distributions.py + python3 scripts/launchpad_copy.py copy-to-series - name: Cleanup Launchpad credentials if: always() run: rm -f /tmp/lp-creds.txt @@ -102,7 +102,7 @@ jobs: env: LP_CREDENTIALS_FILE: /tmp/lp-creds.txt run: | - python3 scripts/copy_package_proposed_to_ppa.py + python3 scripts/launchpad_copy.py promote - name: Cleanup Launchpad credentials if: always() run: rm -f /tmp/lp-creds.txt diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml new file mode 100644 index 0000000..2f7a47b --- /dev/null +++ b/.github/workflows/python_tests.yml @@ -0,0 +1,22 @@ +name: Python Tests +on: + push: + branches: [main] + pull_request: + branches: [main] +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y distro-info + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + pip install pytest vcrpy launchpadlib + - name: Run tests + run: | + python3 -m pytest tests/ -v diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 460721c..e032885 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,3 +14,8 @@ repos: rev: v1.7.7 hooks: - id: actionlint + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.15.2 + hooks: + - id: ruff-check + - id: ruff-format diff --git a/kolibri_server_setup.py b/kolibri_server_setup.py index 3773c89..3ce157d 100755 --- a/kolibri_server_setup.py +++ b/kolibri_server_setup.py @@ -4,13 +4,11 @@ import subprocess import kolibri.utils.pskolibri as psutil - -from kolibri.core.content.utils.paths import get_content_dir_path from 
kolibri.core.utils.cache import RedisSettingsHelper from kolibri.core.utils.cache import process_cache -from kolibri.utils.options import update_options_file -from kolibri.utils.conf import OPTIONS from kolibri.utils.conf import KOLIBRI_HOME +from kolibri.utils.conf import OPTIONS +from kolibri.utils.options import update_options_file # read the config file options port = OPTIONS["Deployment"]["HTTP_PORT"] @@ -64,12 +62,14 @@ def set_port(port): """ update_options_file("Deployment", "HTTP_PORT", port) + def set_zip_content_port(port): """ Modify Kolibri options to set the TCP port the hashi files will be served on """ update_options_file("Deployment", "ZIP_CONTENT_PORT", port) + def delete_redis_cache(): """ Delete previous cache in redis to reset it when the service starts. @@ -141,7 +141,7 @@ def check_redis_service(): def save_nginx_conf_port(port, zip_port, listen_address="0.0.0.0", nginx_conf=None): """ - Adds the port for nginx to run to an existing config file. + Adds the port for nginx to run to an existing config file. 
""" if nginx_conf is None: @@ -210,9 +210,7 @@ def save_nginx_conf_port(port, zip_port, listen_address="0.0.0.0", nginx_conf=No help="Port to run hashi iframes used when installing/reconfiguring kolibri-server package", ) args = parser.parse_args() - if ( - args.debconfport - ): # To be executed only when installing/reconfiguring the Debian package + if args.debconfport: # To be executed only when installing/reconfiguring the Debian package set_port(args.debconfport) if args.debconfzipport: set_zip_content_port(args.debconfzipport) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..26a7be2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,13 @@ +[tool.ruff] +target-version = "py310" +line-length = 120 + +[tool.ruff.lint] +select = ["E", "F", "I", "PLC0415"] +ignore = ["E501"] + +[tool.ruff.lint.isort] +force-single-line = true + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["E501"] diff --git a/scripts/copy_package_proposed_to_ppa.py b/scripts/copy_package_proposed_to_ppa.py deleted file mode 100644 index 51df9ab..0000000 --- a/scripts/copy_package_proposed_to_ppa.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Promote published packages from kolibri-proposed to kolibri PPA without rebuilds. 
- -Usage (env): - LP_CREDENTIALS_FILE=/path/to/creds python3 scripts/ppa_promote.py -""" - -import os -import sys -from launchpadlib.launchpad import Launchpad -import lazr.restfulclient.errors as lre - - -OWNER = "learningequality" -SRC_PPA = "kolibri-proposed" -DST_PPA = "kolibri" -PACKAGE_WHITELIST = {"kolibri-server"} -APP_NAME = "ppa-kolibri-server-jammy-package" - - -def main(): - - - lp = Launchpad.login_with( - application_name=APP_NAME, - service_root="production", - ) - - owner = lp.people[OWNER] - source_ppa = owner.getPPAByName(name=SRC_PPA) - dest_ppa = owner.getPPAByName(name=DST_PPA) - - packages = source_ppa.getPublishedSources(status="Published", order_by_date=True) - - copied_any = False - for pkg in packages: - if pkg.source_package_name not in PACKAGE_WHITELIST: - continue - try: - print( - f"Copying {pkg.source_package_name} {pkg.source_package_version} " - f"({pkg.distro_series_link}) to {DST_PPA}" - ) - dest_ppa.copyPackage( - from_archive=source_ppa, - include_binaries=True, - to_pocket=pkg.pocket, - source_name=pkg.source_package_name, - version=pkg.source_package_version, - ) - copied_any = True - except lre.BadRequest as e: - if "is obsolete and will not accept new uploads" in str(e): - print( - f"Skip obsolete series for {pkg.source_package_name} {pkg.source_package_version}" - ) - else: - raise - - if not copied_any: - print("No eligible packages to promote.") - else: - print("Promotion requests submitted.") - - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) diff --git a/scripts/copy_to_other_distributions.py b/scripts/copy_to_other_distributions.py deleted file mode 100644 index 1f938f0..0000000 --- a/scripts/copy_to_other_distributions.py +++ /dev/null @@ -1,353 +0,0 @@ -#!/usr/bin/python -"""Copy published precise PPA packages to other configured active dists in Launchpad. - -Note on Xenial: Builds on Xenial may break, so we build for Bionic and copy binaries -for Xenial. 
See: https://github.com/learningequality/kolibri-server/pull/9 - -Typical usage: - - - build a kolibri-server package for bionic - - dput ppa:build/kolibri-server-source_0.9.0~b2-0ubuntu1_source.changes - - wait for it to be built - - run python ppa-copy-packages.py - -Based on https://gist.github.com/mgedmin/7689218 -""" - -import functools -import logging -import optparse -import sys -import time -from collections import defaultdict -import subprocess - -import os -from launchpadlib.launchpad import Launchpad - - -# -# Hardcoded configuration -# - -PPA_OWNER = 'learningequality' -PPA_NAME = 'kolibri-proposed' -PACKAGE_WHITELIST = ['kolibri-server'] - -SOURCE_SERIES = 'jammy' -## Fall back if dynamic fetching does not work. -TARGET_SERIESES = ["plucky", "noble", "jammy", "focal"] -POCKET = 'Release' - -APP_NAME = 'ppa-kolibri-server-copy-packages' - - -log = logging.getLogger(APP_NAME) - -STARTUP_TIME = LAST_LOG_TIME = time.time() -REQUESTS = LAST_REQUESTS = 0 - - -def get_launchpad_client(): - # If LP_CREDENTIALS_FILE is set, launchpadlib will automatically use it. - # Use the same application name as used to create credentials. 
- return Launchpad.login_with( - application_name="ppa-kolibri-server-jammy-package", - service_root="production", - ) - - -def get_supported_series_dynamically(source_series): - try: - out = subprocess.check_output(["ubuntu-distro-info", "--supported"], text=True).strip() - all_series = out.split() - series = [s for s in all_series if s and s != source_series] - log.info("Dynamic series discovery:") - log.info(" Target series (will copy to): %s", ', '.join(series)) - return series - except Exception as e: - log.warning("Failed to get dynamic series list: %s", e) - log.info("Falling back to hardcoded target series: %s", ', '.join(TARGET_SERIESES)) - return TARGET_SERIESES - - -class DebugFormatter(logging.Formatter): - - def format(self, record): - global LAST_LOG_TIME, LAST_REQUESTS - msg = super(DebugFormatter, self).format(record) - if msg.startswith(" "): # continuation of previous message - return msg - now = time.time() - elapsed = now - STARTUP_TIME - delta = now - LAST_LOG_TIME - LAST_LOG_TIME = now - delta_requests = REQUESTS - LAST_REQUESTS - LAST_REQUESTS = REQUESTS - return "\n%.3fs (%+.3fs) [%d/+%d] %s" % (elapsed, delta, REQUESTS, delta_requests, msg) - - -def enable_http_debugging(): - import httplib2 - httplib2.debuglevel = 1 - - -def install_request_counter(): - import httplib2 - orig = httplib2.Http.request - - @functools.wraps(orig) - def wrapper(*args, **kw): - global REQUESTS - REQUESTS += 1 - return orig(*args, **kw) - - httplib2.Http.request = wrapper - - -def set_up_logging(level=logging.INFO): - handler = logging.StreamHandler(sys.stdout) - if level == logging.DEBUG: - handler.setFormatter(DebugFormatter()) - log.addHandler(handler) - log.setLevel(level) - - -# -# Caching decorators -# - -class once(object): - """A @property that is computed only once per instance. - - Also known as @reify in Pyramid and @Lazy in Zope. 
- """ - - def __init__(self, fn): - self.fn = fn - - def __get__(self, obj, type=None): - value = self.fn(obj) - # Put the value in the instance __dict__. Since the once - # descriptor has no __set__ and is therefore not a data - # descriptor, instance __dict__ will take precedence on - # subsequent attribute accesses. - setattr(obj, self.fn.__name__, value) - return value - - -def cache(fn): - """Trivial memoization decorator.""" - cache = fn.cache = {} - - @functools.wraps(fn) - def inner(*args): - # The way we use the cache is suboptimal for methods: it is - # shared between all instances, but the instance is part of - # the cache lookup key, negating any advantage of the sharing. - # Luckily in this script there's only one instance. - try: - return cache[args] - except KeyError: - value = cache[args] = fn(*args) - return value - except TypeError: - raise TypeError('%s argument types preclude caching: %s' % - (fn.__name__, repr(args))) - - return inner - - -# -# Launchpad API wrapper with heavy caching -# - - -class LaunchpadWrapper(object): - application_name = 'ppa-kolibri-server-copy-packages' - launchpad_instance = 'production' - - ppa_owner = PPA_OWNER - ppa_name = PPA_NAME - - def __init__(self): - self.queue = defaultdict(set) - - # Trivial caching wrappers for Launchpad API - @once - def lp(self): - log.debug("Logging in...") - # cost: 2 HTTP requests - return get_launchpad_client() - - @once - def owner(self): - lp = self.lp # ensure correct ordering of debug messages - log.debug("Getting the owner...") - # cost: 1 HTTP request - return lp.people[self.ppa_owner] - - @once - def ppa(self): - owner = self.owner # ensure correct ordering of debug messages - log.debug("Getting the PPA...") - # cost: 1 HTTP request - return owner.getPPAByName(name=self.ppa_name) - - @cache - def get_series(self, name): - ppa = self.ppa # ensure correct ordering of debug messages - log.debug("Locating the series: %s...", name) - # cost: 1 HTTP request - return 
ppa.distribution.getSeries(name_or_version=name) - - @cache - def get_published_sources(self, series_name): - series = self.get_series(series_name) - log.debug("Listing source packages for %s...", series_name) - # cost: 1 HTTP request - return self.ppa.getPublishedSources(distro_series=series) - - @cache - def get_builds_for_source(self, source): - log.debug("Listing %s builds for %s %s...", - source.distro_series_link.rpartition('/')[-1], - source.source_package_name, - source.source_package_version) - # cost: 1 HTTP request, plus another one for accessing the list - return source.getBuilds() - - @cache - def get_source_packages(self, series_name, package_names=None): - """Return {package_name: {version: 'Status', ...}, ...}""" - res = defaultdict(dict) - for source in self.get_published_sources(series_name): - name = source.source_package_name - if package_names is not None and name not in package_names: - continue - version = source.source_package_version - res[name][version] = source - return res - - def get_source_for(self, name, version, series_name): - sources = self.get_source_packages(series_name) - return sources.get(name, {}).get(version) - - def is_missing(self, name, version, series_name): - return self.get_source_for(name, version, series_name) is None - - def get_builds_for(self, name, version, series_name): - source = self.get_source_for(name, version, series_name) - if not source: - return None - return self.get_builds_for_source(source) - - def has_published_binaries(self, name, version, series_name): - builds = self.get_builds_for(name, version, series_name) - return not builds or builds[0].buildstate == u'Successfully built' - - @cache - def get_usable_sources(self, package_names, series_name): - res = [] - for source in self.get_published_sources(series_name): - name = source.source_package_name - if name not in package_names: - continue - version = source.source_package_version - if source.status in ('Superseded', 'Deleted', 'Obsolete'): - 
log.info("%s %s is %s in %s", name, version, - source.status.lower(), series_name) - continue - if source.status != ('Published'): - log.warning("%s %s is %s in %s", name, version, - source.status.lower(), series_name) - continue - res.append((name, version)) - return res - - def queue_copy(self, name, source_series, target_series, pocket): - self.queue[source_series, target_series, pocket].add(name) - - def perform_queued_copies(self): - first = True - for (source_series, target_series, pocket), names in self.queue.items(): - if not names: - continue - if first: - log.info("") - first = False - log.info("Copying %s to %s", ', '.join(sorted(names)), - target_series) - self.ppa.syncSources(from_archive=self.ppa, - to_series=target_series, - to_pocket=pocket, - include_binaries=True, - source_names=sorted(names)) - - -def main(): - parser = optparse.OptionParser('usage: %prog [options]', - description="copy ppa:%s/%s packages from %s to %s" - % (PPA_OWNER, PPA_NAME, SOURCE_SERIES, ', '.join(TARGET_SERIESES))) - parser.add_option('-v', '--verbose', action='count') - parser.add_option('-q', '--quiet', action='store_true') - parser.add_option('--debug', action='store_true') - opts, args = parser.parse_args() - - if opts.quiet: - set_up_logging(logging.WARNING) - elif opts.debug: - enable_http_debugging() - install_request_counter() - set_up_logging(logging.DEBUG) - elif opts.verbose > 1: - install_request_counter() - set_up_logging(logging.DEBUG) - else: - set_up_logging(logging.INFO) - - log.info("Spinning up the Launchpad API to copy targets in {}".format(", ".join(PACKAGE_WHITELIST))) - - lp = LaunchpadWrapper() - - for (name, version) in lp.get_usable_sources(tuple(PACKAGE_WHITELIST), - SOURCE_SERIES): - mentioned = False - notices = [] - target_series_names = get_supported_series_dynamically(SOURCE_SERIES) - for target_series_name in target_series_names: - source = lp.get_source_for(name, version, target_series_name) - if source is None: - mentioned = True - 
log.info("%s %s missing from %s", name, version, - target_series_name) - if lp.has_published_binaries(name, version, - SOURCE_SERIES): - lp.queue_copy(name, SOURCE_SERIES, - target_series_name, POCKET) - else: - builds = lp.get_builds_for(name, version, SOURCE_SERIES) - if builds: - log.info(" but it isn't built yet (state: %s) - %s", - builds[0].buildstate, builds[0].web_link) - elif source.status != 'Published': - notices.append(" but it is %s in %s" % - (source.status.lower(), target_series_name)) - elif not lp.has_published_binaries(name, version, target_series_name): - builds = lp.get_builds_for(name, version, target_series_name) - if builds: - notices.append(" but it isn't built yet for %s (state: %s) - %s" % - (target_series_name, builds[0].buildstate, - builds[0].web_link)) - if not mentioned or notices: - log.info("%s %s", name, version) - for notice in notices: - log.info(notice) - - lp.perform_queued_copies() - - log.debug("All done") - - -if __name__ == '__main__': - main() diff --git a/scripts/create_lp_creds.py b/scripts/create_lp_creds.py index a62f447..4e0a599 100644 --- a/scripts/create_lp_creds.py +++ b/scripts/create_lp_creds.py @@ -1,6 +1,6 @@ -from launchpadlib.launchpad import Launchpad import os +from launchpadlib.launchpad import Launchpad # 1) One-time: generate credentials locally # Install launchpadlib: @@ -15,7 +15,6 @@ # Paste the credentials file content. - # Exact APP name should be passed in build workflow # Launchpad.login_with(application_name="APP_NAME", ...) APP_NAME = "ppa-kolibri-server-jammy-package" diff --git a/scripts/launchpad_copy.py b/scripts/launchpad_copy.py new file mode 100644 index 0000000..fde22ef --- /dev/null +++ b/scripts/launchpad_copy.py @@ -0,0 +1,406 @@ +#!/usr/bin/env python3 +"""Consolidated Launchpad PPA copy tool. + +Subcommands: + copy-to-series Copy packages from source series to all other supported series within a PPA. + promote Copy all published packages from one PPA to another. 
+""" + +import argparse +import functools +import logging +import subprocess +import sys +import time +from collections import defaultdict + +import httplib2 +import lazr.restfulclient.errors as lre +from launchpadlib.launchpad import Launchpad + +# --- Constants --- + +PPA_OWNER = "learningequality" +PROPOSED_PPA_NAME = "kolibri-proposed" +RELEASE_PPA_NAME = "kolibri" +PACKAGE_WHITELIST = ["kolibri-server"] +POCKET = "Release" +APP_NAME = "ppa-kolibri-server-copy-packages" + +log = logging.getLogger(APP_NAME) + +STARTUP_TIME = LAST_LOG_TIME = time.time() +REQUESTS = LAST_REQUESTS = 0 + + +# --- Utilities --- + + +def get_current_series(): + """Get the Ubuntu series codename for the current system.""" + return subprocess.check_output(["lsb_release", "-cs"], text=True).strip() + + +def get_supported_series(source_series): + """Discover supported Ubuntu series dynamically, including ESM/ELTS.""" + out = subprocess.check_output(["ubuntu-distro-info", "--supported-esm"], text=True).strip() + all_series = out.split() + series = [s for s in all_series if s and s != source_series] + log.info("Dynamic series discovery:") + log.info(" Target series (will copy to): %s", ", ".join(series)) + return series + + +class DebugFormatter(logging.Formatter): + def format(self, record): + global LAST_LOG_TIME, LAST_REQUESTS + msg = super().format(record) + if msg.startswith(" "): + return msg + now = time.time() + elapsed = now - STARTUP_TIME + delta = now - LAST_LOG_TIME + LAST_LOG_TIME = now + delta_requests = REQUESTS - LAST_REQUESTS + LAST_REQUESTS = REQUESTS + return "\n%.3fs (%+.3fs) [%d/+%d] %s" % ( + elapsed, + delta, + REQUESTS, + delta_requests, + msg, + ) + + +def enable_http_debugging(): + httplib2.debuglevel = 1 + + +def install_request_counter(): + orig = httplib2.Http.request + + @functools.wraps(orig) + def wrapper(*args, **kw): + global REQUESTS + REQUESTS += 1 + return orig(*args, **kw) + + httplib2.Http.request = wrapper + + +def set_up_logging(level=logging.INFO): 
+ handler = logging.StreamHandler(sys.stdout) + if level == logging.DEBUG: + handler.setFormatter(DebugFormatter()) + log.addHandler(handler) + log.setLevel(level) + + +# --- LaunchpadWrapper --- + + +class LaunchpadWrapper: + """Cached wrapper around the Launchpad API.""" + + def __init__(self): + self.queue = defaultdict(set) + + @functools.cached_property + def lp(self): + log.debug("Logging in...") + return Launchpad.login_with( + application_name=APP_NAME, + service_root="production", + ) + + @functools.cached_property + def owner(self): + lp = self.lp + log.debug("Getting the owner...") + return lp.people[PPA_OWNER] + + def get_ppa(self, name): + owner = self.owner + log.debug("Getting PPA: %s...", name) + return owner.getPPAByName(name=name) + + @functools.cached_property + def proposed_ppa(self): + return self.get_ppa(PROPOSED_PPA_NAME) + + @functools.cached_property + def release_ppa(self): + return self.get_ppa(RELEASE_PPA_NAME) + + @functools.cache + def get_series(self, name): + ppa = self.proposed_ppa + log.debug("Locating the series: %s...", name) + return ppa.distribution.getSeries(name_or_version=name) + + @functools.cache + def get_published_sources(self, ppa, series_name=None, status=None): + kwargs = {} + if series_name: + kwargs["distro_series"] = self.get_series(series_name) + if status: + kwargs["status"] = status + kwargs["order_by_date"] = True + log.debug("Listing source packages...") + return ppa.getPublishedSources(**kwargs) + + @functools.cache + def get_builds_for_source(self, source): + log.debug( + "Listing %s builds for %s %s...", + source.distro_series_link.rpartition("/")[-1], + source.source_package_name, + source.source_package_version, + ) + return source.getBuilds() + + @functools.cache + def get_source_packages(self, ppa, series_name, package_names=None): + """Return {package_name: {version: source, ...}, ...}""" + res = defaultdict(dict) + for source in self.get_published_sources(ppa, series_name): + name = 
source.source_package_name + if package_names is not None and name not in package_names: + continue + res[name][source.source_package_version] = source + return res + + def get_source_for(self, ppa, name, version, series_name): + sources = self.get_source_packages(ppa, series_name) + return sources.get(name, {}).get(version) + + def is_missing(self, ppa, name, version, series_name): + return self.get_source_for(ppa, name, version, series_name) is None + + def get_builds_for(self, ppa, name, version, series_name): + source = self.get_source_for(ppa, name, version, series_name) + if not source: + return None + return self.get_builds_for_source(source) + + def has_published_binaries(self, ppa, name, version, series_name): + builds = self.get_builds_for(ppa, name, version, series_name) + return not builds or builds[0].buildstate == "Successfully built" + + @functools.cache + def get_usable_sources(self, ppa, package_names, series_name): + res = [] + for source in self.get_published_sources(ppa, series_name): + name = source.source_package_name + if name not in package_names: + continue + version = source.source_package_version + if source.status in ("Superseded", "Deleted", "Obsolete"): + log.info( + "%s %s is %s in %s", + name, + version, + source.status.lower(), + series_name, + ) + continue + if source.status != "Published": + log.warning( + "%s %s is %s in %s", + name, + version, + source.status.lower(), + series_name, + ) + continue + res.append((name, version)) + return res + + def queue_copy(self, name, source_series, target_series, pocket): + self.queue[source_series, target_series, pocket].add(name) + + def perform_queued_copies(self, ppa): + first = True + for (source_series, target_series, pocket), names in self.queue.items(): + if not names: + continue + if first: + log.info("") + first = False + log.info("Copying %s to %s", ", ".join(sorted(names)), target_series) + ppa.syncSources( + from_archive=ppa, + to_series=target_series, + to_pocket=pocket, + 
include_binaries=True, + source_names=sorted(names), + ) + + def copy_to_series(self): + """Copy packages from source series to all other supported Ubuntu series.""" + source_series = get_current_series() + log.info( + "Spinning up the Launchpad API to copy targets in %s (source series: %s)", + ", ".join(PACKAGE_WHITELIST), + source_series, + ) + + ppa = self.proposed_ppa + + for name, version in self.get_usable_sources(ppa, tuple(PACKAGE_WHITELIST), source_series): + mentioned = False + notices = [] + target_series_names = get_supported_series(source_series) + for target_series_name in target_series_names: + source = self.get_source_for(ppa, name, version, target_series_name) + if source is None: + mentioned = True + log.info("%s %s missing from %s", name, version, target_series_name) + if self.has_published_binaries(ppa, name, version, source_series): + self.queue_copy(name, source_series, target_series_name, POCKET) + else: + builds = self.get_builds_for(ppa, name, version, source_series) + if builds: + log.info( + " but it isn't built yet (state: %s) - %s", + builds[0].buildstate, + builds[0].web_link, + ) + elif source.status != "Published": + notices.append(" but it is %s in %s" % (source.status.lower(), target_series_name)) + elif not self.has_published_binaries(ppa, name, version, target_series_name): + builds = self.get_builds_for(ppa, name, version, target_series_name) + if builds: + notices.append( + " but it isn't built yet for %s (state: %s) - %s" + % ( + target_series_name, + builds[0].buildstate, + builds[0].web_link, + ) + ) + if not mentioned or notices: + log.info("%s %s", name, version) + for notice in notices: + log.info(notice) + + self.perform_queued_copies(ppa) + log.debug("All done") + return 0 + + def promote(self): + """Promote published packages from kolibri-proposed to kolibri PPA.""" + log.info("Promoting packages from %s to %s", PROPOSED_PPA_NAME, RELEASE_PPA_NAME) + + source_ppa = self.proposed_ppa + dest_ppa = self.release_ppa + + 
packages = source_ppa.getPublishedSources(status="Published", order_by_date=True) + + copied_any = False + for pkg in packages: + if pkg.source_package_name not in PACKAGE_WHITELIST: + continue + try: + log.info( + "Copying %s %s (%s) to %s", + pkg.source_package_name, + pkg.source_package_version, + pkg.distro_series_link, + RELEASE_PPA_NAME, + ) + dest_ppa.copyPackage( + from_archive=source_ppa, + include_binaries=True, + to_pocket=pkg.pocket, + source_name=pkg.source_package_name, + version=pkg.source_package_version, + ) + copied_any = True + except lre.BadRequest as e: + if "is obsolete and will not accept new uploads" in str(e): + log.info( + "Skip obsolete series for %s %s", + pkg.source_package_name, + pkg.source_package_version, + ) + else: + raise + + if not copied_any: + log.info("No eligible packages to promote.") + else: + log.info("Promotion requests submitted.") + + return 0 + + +# --- CLI --- + + +def build_parser(): + parser = argparse.ArgumentParser(description="Launchpad PPA copy tool for kolibri-server packages.") + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Increase verbosity (use -vv for debug).", + ) + parser.add_argument("-q", "--quiet", action="store_true", help="Suppress info output.") + parser.add_argument("--debug", action="store_true", help="Enable HTTP debug output.") + + subparsers = parser.add_subparsers(dest="command", required=True) + + subparsers.add_parser( + "copy-to-series", + help="Copy packages from source series to all other supported series within a PPA.", + ) + + subparsers.add_parser( + "promote", + help="Promote published packages from kolibri-proposed to kolibri PPA.", + ) + + return parser + + +def configure_logging(args): + if args.quiet: + set_up_logging(logging.WARNING) + elif args.debug: + enable_http_debugging() + install_request_counter() + set_up_logging(logging.DEBUG) + elif args.verbose > 1: + install_request_counter() + set_up_logging(logging.DEBUG) + else: + 
set_up_logging(logging.INFO) + + +def cmd_copy_to_series(args): + """Copy packages from source series to all other supported Ubuntu series.""" + lp = LaunchpadWrapper() + return lp.copy_to_series() + + +def cmd_promote(args): + """Promote published packages from kolibri-proposed to kolibri PPA.""" + lp = LaunchpadWrapper() + return lp.promote() + + +def main(): + parser = build_parser() + args = parser.parse_args() + configure_logging(args) + + if args.command == "copy-to-series": + return cmd_copy_to_series(args) + elif args.command == "promote": + return cmd_promote(args) + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/test/Dockerfile b/test/Dockerfile index d632cff..15f047a 100644 --- a/test/Dockerfile +++ b/test/Dockerfile @@ -34,7 +34,6 @@ COPY hashi_uwsgi.ini hashi_uwsgi.ini COPY kolibri_server_setup.py kolibri_server_setup.py COPY nginx.conf nginx.conf COPY nginx.d_README nginx.d_README -COPY scripts/copy_to_other_distributions.py copy_to_other_distributions.py COPY uwsgi.d_README uwsgi.d_README COPY uwsgi.ini uwsgi.ini diff --git a/tests/cassettes/.gitkeep b/tests/cassettes/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..17de092 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1 @@ +"""Shared pytest fixtures for launchpad_copy tests.""" diff --git a/tests/test_launchpad_copy.py b/tests/test_launchpad_copy.py new file mode 100644 index 0000000..2d1ff4c --- /dev/null +++ b/tests/test_launchpad_copy.py @@ -0,0 +1,429 @@ +"""Tests for scripts/launchpad_copy.py.""" + +import logging +import os +import shutil +import sys +from unittest.mock import MagicMock +from unittest.mock import patch + +import pytest + +# Add scripts/ to path so we can import launchpad_copy +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts")) + +from launchpad_copy import LaunchpadWrapper +from launchpad_copy import build_parser +from launchpad_copy 
from launchpad_copy import LaunchpadWrapper
from launchpad_copy import build_parser
from launchpad_copy import configure_logging
from launchpad_copy import get_current_series
from launchpad_copy import get_supported_series
from launchpad_copy import log
from launchpad_copy import main

# --- Argparse tests ---


def _parse(argv):
    """Parse *argv* with a freshly built parser."""
    return build_parser().parse_args(argv)


class TestBuildParser:
    """Test the argparse CLI skeleton."""

    def test_verbose_defaults_to_zero(self):
        assert _parse(["copy-to-series"]).verbose == 0

    def test_verbose_increments_with_v_flags(self):
        assert _parse(["-vv", "copy-to-series"]).verbose == 2

    def test_copy_to_series_subcommand_parsed(self):
        assert _parse(["copy-to-series"]).command == "copy-to-series"

    def test_promote_subcommand_parsed(self):
        assert _parse(["promote"]).command == "promote"

    def test_subcommand_required(self):
        # With no subcommand argparse errors out, which exits the process.
        with pytest.raises(SystemExit):
            _parse([])

    def test_quiet_flag(self):
        assert _parse(["-q", "promote"]).quiet is True

    def test_debug_flag(self):
        assert _parse(["--debug", "promote"]).debug is True


# --- Series discovery tests ---

# Both helpers shell out to system tools; skip the live tests when absent.
has_lsb_release = bool(shutil.which("lsb_release"))
has_ubuntu_distro_info = bool(shutil.which("ubuntu-distro-info"))


@pytest.mark.skipif(not has_lsb_release, reason="lsb_release not available on this system")
class TestGetCurrentSeries:
    """Test system series detection using real lsb_release."""

    def test_returns_non_empty_string(self):
        series = get_current_series()
        assert isinstance(series, str)
        assert series

    def test_raises_on_missing_command(self):
        # A missing binary must propagate, not be silently swallowed.
        with pytest.raises(FileNotFoundError):
            with patch("subprocess.check_output", side_effect=FileNotFoundError("no cmd")):
                get_current_series()
@pytest.mark.skipif(
    not has_ubuntu_distro_info,
    reason="ubuntu-distro-info not available on this system",
)
class TestGetSupportedSeries:
    """Test dynamic series discovery using real ubuntu-distro-info."""

    def test_returns_list_of_series(self):
        result = get_supported_series("jammy")
        assert isinstance(result, list)
        assert len(result) > 0

    def test_excludes_source_series(self):
        # The source series must never be offered as a copy target for itself.
        result = get_supported_series("jammy")
        assert "jammy" not in result

    def test_all_entries_are_non_empty_strings(self):
        result = get_supported_series("jammy")
        for s in result:
            assert isinstance(s, str)
            assert len(s) > 0

    def test_raises_on_missing_command(self):
        # A missing ubuntu-distro-info binary must surface, not be swallowed.
        with patch("subprocess.check_output", side_effect=FileNotFoundError("no cmd")):
            with pytest.raises(FileNotFoundError):
                get_supported_series("jammy")


# --- LaunchpadWrapper tests ---


class TestLaunchpadWrapper:
    """Test LaunchpadWrapper queue and filtering logic."""

    def test_queue_copy_accumulates_names(self):
        wrapper = LaunchpadWrapper()
        wrapper.queue_copy("kolibri-server", "jammy", "noble", "Release")
        wrapper.queue_copy("kolibri-server", "jammy", "focal", "Release")

        # Queue keys are (from_series, to_series, pocket); values collect names.
        assert ("jammy", "noble", "Release") in wrapper.queue
        assert ("jammy", "focal", "Release") in wrapper.queue
        assert "kolibri-server" in wrapper.queue[("jammy", "noble", "Release")]

    def test_queue_starts_empty(self):
        wrapper = LaunchpadWrapper()
        assert len(wrapper.queue) == 0

    def test_perform_queued_copies_calls_sync_sources(self):
        wrapper = LaunchpadWrapper()
        wrapper.queue_copy("kolibri-server", "jammy", "noble", "Release")

        mock_ppa = MagicMock()
        wrapper.perform_queued_copies(mock_ppa)

        # One queued entry must translate into exactly one syncSources call.
        mock_ppa.syncSources.assert_called_once_with(
            from_archive=mock_ppa,
            to_series="noble",
            to_pocket="Release",
            include_binaries=True,
            source_names=["kolibri-server"],
        )

    def test_perform_queued_copies_skips_empty_queues(self):
        wrapper = LaunchpadWrapper()
        mock_ppa = MagicMock()
        wrapper.perform_queued_copies(mock_ppa)

        mock_ppa.syncSources.assert_not_called()

    def test_get_usable_sources_filters_by_whitelist(self):
        wrapper = LaunchpadWrapper()
        mock_ppa = MagicMock()

        src_good = MagicMock()
        src_good.source_package_name = "kolibri-server"
        src_good.source_package_version = "1.0"
        src_good.status = "Published"

        src_bad = MagicMock()
        src_bad.source_package_name = "other-package"
        src_bad.source_package_version = "2.0"
        src_bad.status = "Published"

        # __wrapped__ bypasses what is presumably a caching decorator on
        # get_usable_sources so the patched get_published_sources is hit
        # — TODO confirm against launchpad_copy.py.
        with patch.object(wrapper, "get_published_sources", return_value=[src_good, src_bad]):
            result = LaunchpadWrapper.get_usable_sources.__wrapped__(wrapper, mock_ppa, ("kolibri-server",), "jammy")

        assert len(result) == 1
        assert result[0] == ("kolibri-server", "1.0")

    def test_get_usable_sources_skips_superseded(self):
        wrapper = LaunchpadWrapper()
        mock_ppa = MagicMock()

        src = MagicMock()
        src.source_package_name = "kolibri-server"
        src.source_package_version = "1.0"
        src.status = "Superseded"

        with patch.object(wrapper, "get_published_sources", return_value=[src]):
            result = LaunchpadWrapper.get_usable_sources.__wrapped__(wrapper, mock_ppa, ("kolibri-server",), "jammy")

        assert len(result) == 0


# --- configure_logging tests ---


class TestConfigureLogging:
    """Test logging configuration based on parsed args.

    configure_logging mutates the shared module logger; each check runs
    through _configured_level, which restores the logger's level and
    handlers afterwards so no state leaks into other tests.
    """

    def _configured_level(self, argv):
        """Run configure_logging for *argv*; return the resulting log level.

        Snapshots and restores ``log.level`` and ``log.handlers`` so the
        global logger is left exactly as it was found.
        """
        args = build_parser().parse_args(argv)
        saved_level = log.level
        saved_handlers = list(log.handlers)
        try:
            log.handlers.clear()
            configure_logging(args)
            return log.level
        finally:
            log.handlers[:] = saved_handlers
            log.setLevel(saved_level)

    def test_default_sets_info_level(self):
        assert self._configured_level(["promote"]) == logging.INFO

    def test_quiet_sets_warning_level(self):
        assert self._configured_level(["-q", "promote"]) == logging.WARNING

    def test_vv_sets_debug_level(self):
        assert self._configured_level(["-vv", "promote"]) == logging.DEBUG


# --- main / subcommand dispatch tests ---
class TestMainDispatch:
    """Verify that main routes each subcommand to its handler."""

    def _run_main(self, subcommand, handler_path):
        """Invoke main with *subcommand* on argv while stubbing *handler_path*."""
        with patch(handler_path, return_value=0) as stub:
            with patch("sys.argv", ["launchpad_copy.py", subcommand]):
                outcome = main()
        stub.assert_called_once()
        assert outcome == 0

    def test_dispatches_to_copy_to_series(self):
        self._run_main("copy-to-series", "launchpad_copy.cmd_copy_to_series")

    def test_dispatches_to_promote(self):
        self._run_main("promote", "launchpad_copy.cmd_promote")


# --- copy-to-series subcommand tests ---


def _constant_property(value):
    """Return a zero-arg callable yielding a property that always reads *value*.

    Suitable as ``new_callable`` when patching a read-only property on a class.
    """
    return lambda: property(lambda self: value)


class TestCopyToSeries:
    """Test the copy-to-series logic on LaunchpadWrapper."""

    def test_queues_copy_for_missing_package(self):
        wrapper = LaunchpadWrapper()
        fake_ppa = MagicMock()

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(fake_ppa)),
            patch.object(wrapper, "get_usable_sources", return_value=[("kolibri-server", "0.9.0")]),
            patch.object(wrapper, "get_source_for", return_value=None),
            patch.object(wrapper, "has_published_binaries", return_value=True),
            patch("launchpad_copy.get_current_series", return_value="jammy"),
            patch("launchpad_copy.get_supported_series", return_value=["noble"]),
        ):
            wrapper.copy_to_series()

        key = ("jammy", "noble", "Release")
        assert key in wrapper.queue
        assert "kolibri-server" in wrapper.queue[key]

    def test_skips_copy_when_not_built_yet(self):
        wrapper = LaunchpadWrapper()
        fake_ppa = MagicMock()

        pending_build = MagicMock()
        pending_build.buildstate = "Currently building"
        pending_build.web_link = "https://example.com"

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(fake_ppa)),
            patch.object(wrapper, "get_usable_sources", return_value=[("kolibri-server", "0.9.0")]),
            patch.object(wrapper, "get_source_for", return_value=None),
            patch.object(wrapper, "has_published_binaries", return_value=False),
            patch.object(wrapper, "get_builds_for", return_value=[pending_build]),
            patch("launchpad_copy.get_current_series", return_value="jammy"),
            patch("launchpad_copy.get_supported_series", return_value=["noble"]),
        ):
            wrapper.copy_to_series()

        # A package whose binaries are still building must not be queued.
        assert len(wrapper.queue) == 0

    def test_returns_zero(self):
        wrapper = LaunchpadWrapper()
        fake_ppa = MagicMock()

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(fake_ppa)),
            patch.object(wrapper, "get_usable_sources", return_value=[]),
            patch("launchpad_copy.get_current_series", return_value="jammy"),
            patch("launchpad_copy.get_supported_series", return_value=[]),
        ):
            assert wrapper.copy_to_series() == 0


# --- promote subcommand tests ---


class TestPromote:
    """Test the promote logic on LaunchpadWrapper."""

    @staticmethod
    def _published(name, version="0.9.0", series="jammy"):
        """Build a mock published-source record for *name*."""
        pkg = MagicMock()
        pkg.source_package_name = name
        pkg.source_package_version = version
        pkg.distro_series_link = "https://lp/ubuntu/" + series
        pkg.pocket = "Release"
        return pkg

    def test_copies_whitelisted_published_package(self):
        wrapper = LaunchpadWrapper()
        origin = MagicMock()
        destination = MagicMock()
        origin.getPublishedSources.return_value = [self._published("kolibri-server")]

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(origin)),
            patch.object(type(wrapper), "release_ppa", new_callable=_constant_property(destination)),
        ):
            outcome = wrapper.promote()

        destination.copyPackage.assert_called_once_with(
            from_archive=origin,
            include_binaries=True,
            to_pocket="Release",
            source_name="kolibri-server",
            version="0.9.0",
        )
        assert outcome == 0

    def test_skips_non_whitelisted_package(self):
        wrapper = LaunchpadWrapper()
        origin = MagicMock()
        destination = MagicMock()
        origin.getPublishedSources.return_value = [self._published("other-package")]

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(origin)),
            patch.object(type(wrapper), "release_ppa", new_callable=_constant_property(destination)),
        ):
            outcome = wrapper.promote()

        destination.copyPackage.assert_not_called()
        assert outcome == 0

    def test_handles_obsolete_series_gracefully(self):
        wrapper = LaunchpadWrapper()
        origin = MagicMock()
        destination = MagicMock()
        origin.getPublishedSources.return_value = [self._published("kolibri-server", series="xenial")]

        class FakeBadRequest(Exception):
            pass

        destination.copyPackage.side_effect = FakeBadRequest("xenial is obsolete and will not accept new uploads")

        with (
            patch.object(type(wrapper), "proposed_ppa", new_callable=_constant_property(origin)),
            patch.object(type(wrapper), "release_ppa", new_callable=_constant_property(destination)),
            patch("launchpad_copy.lre") as fake_lre,
        ):
            # promote matches the obsolete-series message on lre.BadRequest
            # and must swallow it rather than crash the run.
            fake_lre.BadRequest = FakeBadRequest
            assert wrapper.promote() == 0


# --- tests/vcr_config.py ---
"""VCR configuration for recording/replaying HTTP interactions."""

import vcr

my_vcr = vcr.VCR(
    cassette_library_dir="tests/cassettes",
    record_mode="new_episodes",
    path_transformer=vcr.VCR.ensure_suffix(".yaml"),
    filter_headers=["authorization"],
)