diff --git a/.gitignore b/.gitignore
index bf73cef..90dfb50 100644
--- a/.gitignore
+++ b/.gitignore
@@ -83,3 +83,4 @@
 /module-build-service-2.30.4.tar.gz
 /module-build-service-3.4.1.tar.gz
 /module-build-service-3.6.1.tar.gz
+/module-build-service-3.8.0.tar.gz
diff --git a/mbs-PR1458-only-use-default-modules.patch b/mbs-PR1458-only-use-default-modules.patch
deleted file mode 100644
index 5bde5f8..0000000
--- a/mbs-PR1458-only-use-default-modules.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-diff --git a/module_build_service/scheduler/default_modules.py b/module_build_service/scheduler/default_modules.py
-index 2225878..2bced95 100644
---- a/module_build_service/scheduler/default_modules.py
-+++ b/module_build_service/scheduler/default_modules.py
-@@ -84,22 +84,23 @@ def add_default_modules(db_session, mmd, arches):
-             )
-             continue
-
--        try:
--            # We are reusing resolve_requires instead of directly querying the database since it
--            # provides the exact format that is needed for mbs.xmd.buildrequires.
--            #
--            # Only one default module is processed at a time in resolve_requires so that we
--            # are aware of which modules are not in the database, and can add those that are as
--            # buildrequires.
--            resolver = GenericResolver.create(db_session, conf)
--            resolved = resolver.resolve_requires([ns])
--        except UnprocessableEntity:
-+        # Query for the latest default module that was built against this base module
-+        resolver = GenericResolver.create(db_session, conf)
-+        default_module_mmds = resolver.get_buildrequired_modulemds(name, stream, bm_mmd)
-+        if not default_module_mmds:
-             log.warning(
-                 "The default module %s from %s is not in the database and couldn't be added as "
-                 "a buildrequire",
-                 ns, bm_nsvc,
-             )
-             continue
-+        # Since a default module entry only has the name and stream, there's no way to know
-+        # which context to pick from if multiple are present. In this case, just pick the first
-+        # one, which is the latest version but potentially a random context.
-+        default_module_mmd = default_module_mmds[0]
-+        # Use resolve_requires since it provides the exact format that is needed for
-+        # mbs.xmd.buildrequires
-+        resolved = resolver.resolve_requires([default_module_mmd.get_nsvc()])
-
-         nsvc = ":".join([name, stream, resolved[name]["version"], resolved[name]["context"]])
-         log.info("Adding the default module %s as a buildrequire", nsvc)
-diff --git a/tests/test_scheduler/test_default_modules.py b/tests/test_scheduler/test_default_modules.py
-index 5e8f288..46d53f7 100644
---- a/tests/test_scheduler/test_default_modules.py
-+++ b/tests/test_scheduler/test_default_modules.py
-@@ -21,8 +21,6 @@ def test_add_default_modules(mock_get_dm, mock_hc, db_session):
-     Test that default modules present in the database are added, and the others are ignored.
- """ - clean_database() -- make_module_in_db("python:3:12345:1", db_session=db_session) -- make_module_in_db("nodejs:11:2345:2", db_session=db_session) - mmd = load_mmd(read_staged_data("formatted_testmodule.yaml")) - xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"] - assert set(xmd_brs.keys()) == {"platform"} -@@ -40,6 +38,9 @@ def test_add_default_modules(mock_get_dm, mock_hc, db_session): - platform_xmd["mbs"]["use_default_modules"] = True - platform_mmd.set_xmd(platform_xmd) - platform.modulemd = mmd_to_str(platform_mmd) -+ -+ make_module_in_db("python:3:12345:1", base_module=platform, db_session=db_session) -+ make_module_in_db("nodejs:11:2345:2", base_module=platform, db_session=db_session) - db_session.commit() - - mock_get_dm.return_value = { diff --git a/mbs-PR1709-fix-scratch-build-suffix.patch b/mbs-PR1709-fix-scratch-build-suffix.patch deleted file mode 100644 index 4c1ad76..0000000 --- a/mbs-PR1709-fix-scratch-build-suffix.patch +++ /dev/null @@ -1,148 +0,0 @@ -From 13d00928acf86f49fcced6cc034f663e8f2bbf64 Mon Sep 17 00:00:00 2001 -From: Martin Curlej -Date: Jun 04 2021 13:16:38 +0000 -Subject: Fixed scratch build suffix bug - - -When building a scratch build of a module with static context -the scratch suffix was added twice. - -Signed-off-by: Martin Curlej - ---- - -diff --git a/module_build_service/web/submit.py b/module_build_service/web/submit.py ---- a/module_build_service/web/submit.py -+++ b/module_build_service/web/submit.py -@@ -701,7 +701,9 @@ def submit_module_build(db_session, username, stream_or_packager, params, module_stream_version): - -- xmd = mmd.get_xmd() -- if xmd["mbs"].get("static_context"): -+ if static_context: -+ # if the static_context is True we use the context from defined in the mmd -+ # and discard the computed one. - module.context = mmd.get_context() -- -- module.context += context_suffix -+ else: -+ # if the context is defined by MSE, we need to add a context_suffix if it exists. 
-+            module.context += context_suffix
-         db_session.commit()
-@@ -769,10 +769,15 @@ def process_module_context_configuration(stream_or_packager):
-         return streams, static_context
-     else:
-         xmd = stream_or_packager.get_xmd()
--        # check if we are handling rebuild of a static context module
-+
-+        # check if the static format is defined through `static_context` field
-+        if stream_or_packager.is_static_context():
-+            static_context = True
-+            return [stream_or_packager], static_context
-+
-+        # check if we are handling rebuild of a static context module defined in xmd
-         if "mbs" in xmd:
--            # check if it is a static context
--            if "static_context" in xmd["mbs"] or stream_or_packager.is_static_context():
-+            if "static_context" in xmd["mbs"]:
-                 static_context = True
-                 return [stream_or_packager], static_context
-
-diff --git a/tests/test_web/test_submit.py b/tests/test_web/test_submit.py
-index a2f548f..b5e3869 100644
---- a/tests/test_web/test_submit.py
-+++ b/tests/test_web/test_submit.py
-@@ -25,10 +25,15 @@ from tests import (
-     make_module,
-     read_staged_data,
-     init_data,
-+    clean_database,
- )
-
-
- class TestSubmit:
-+
-+    def teardown_method(self, tested_method):
-+        clean_database()
-+
-     def test_get_prefixed_version_f28(self):
-         scheduler_init_data(1)
-         build_one = models.ModuleBuild.get_by_id(db_session, 2)
-@@ -144,6 +149,77 @@ class TestSubmit:
-         assert "mbs_options" not in xmd
-         assert xmd["mbs"]["static_context"]
-+    def test_submit_build_module_scratch_v3_static_context(self):
-+        """
-+        Test if the static context in the v3 metadata format will contain the correct suffix
-+        during a scratch build
-+        """
-+        init_data(multiple_stream_versions=True)
-+        yaml_str = read_staged_data("v3/mmd_packager")
-+        mmd = load_mmd(yaml_str)
-+        ux_timestamp = "1613048427"
-+        version = provide_module_stream_version_from_timestamp(ux_timestamp)
-+        params = {"scratch": True}
-+
-+        builds = submit_module_build(db_session, "foo", mmd, params, version)
-+
-+        assert len(builds) == 2
-+
-+        expected_contexts = {"CTX1_1": {}, "CTX2_1": {}}
-+
-+        for build in builds:
-+            mmd = build.mmd()
-+            context = mmd.get_context()
-+            assert context in expected_contexts
-+
-+    def test_submit_build_module_scratch_v2_static_context(self):
-+        """
-+        Test if the static context in the v2 metadata format will contain
-+        the correct suffix during a scratch build
-+        """
-+        scheduler_init_data(1)
-+        yaml_str = read_staged_data("static_context_v2")
-+        mmd = load_mmd(yaml_str)
-+        ux_timestamp = "1613048427"
-+        version = provide_module_stream_version_from_timestamp(ux_timestamp)
-+        params = {"scratch": True}
-+
-+        builds = submit_module_build(db_session, "app", mmd, params, version)
-+
-+        assert len(builds) == 2
-+
-+        expected_contexts = {"context1_1": {}, "context2_1": {}}
-+
-+        for build in builds:
-+            mmd = build.mmd()
-+            context = mmd.get_context()
-+            assert context in expected_contexts
-+
-+    def test_submit_build_module_scratch_increment(self):
-+        """
-+        Test if the context suffix is incremented correctly during a repeated scratch build.
-+ """ -+ init_data(multiple_stream_versions=True) -+ yaml_str = read_staged_data("v3/mmd_packager") -+ mmd = load_mmd(yaml_str) -+ ux_timestamp = "1613048427" -+ version = provide_module_stream_version_from_timestamp(ux_timestamp) -+ params = {"scratch": True} -+ -+ builds = submit_module_build(db_session, "foo", mmd, params, version) -+ -+ assert len(builds) == 2 -+ -+ builds = submit_module_build(db_session, "foo", mmd, params, version) -+ -+ assert len(builds) == 2 -+ -+ expected_contexts = {"CTX1_2": {}, "CTX2_2": {}} -+ for build in builds: -+ mmd = build.mmd() -+ context = mmd.get_context() -+ assert context in expected_contexts -+ - - class TestProcessModuleContextConfiguration: - """ - diff --git a/mbs-PR1711-enforce-sane-module-state-transitions.patch b/mbs-PR1711-enforce-sane-module-state-transitions.patch deleted file mode 100644 index d477ae1..0000000 --- a/mbs-PR1711-enforce-sane-module-state-transitions.patch +++ /dev/null @@ -1,302 +0,0 @@ -From 0d8ab270ea422c8a1bc771d54d4d3e23c7ffe53b Mon Sep 17 00:00:00 2001 -From: Mike McLean -Date: Jun 10 2021 17:44:55 +0000 -Subject: [PATCH 1/3] strict_module_state_transitions config option - - -Fixes: https://pagure.io/fm-orchestrator/issue/1678 - ---- - -diff --git a/module_build_service/common/config.py b/module_build_service/common/config.py -index 91bbb33..22d55de 100644 ---- a/module_build_service/common/config.py -+++ b/module_build_service/common/config.py -@@ -747,6 +747,11 @@ class Config(object): - ], - "desc": "The list Python paths for the Celery application to import.", - }, -+ "strict_module_state_transitions": { -+ "type": bool, -+ "default": True, -+ "desc": "Whether to strictly enforce module state transitions", -+ }, - } - - def __init__(self, conf_section_obj): -diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py -index 3e8d21f..3b858dd 100644 ---- a/module_build_service/scheduler/handlers/modules.py -+++ b/module_build_service/scheduler/handlers/modules.py -@@ -61,8 +61,17 @@ def failed(msg_id, module_build_id, module_build_state): - "Note that retrieved module state %r doesn't match message module state %r", - build.state, module_build_state, - ) -- # This is ok.. it's a race condition we can ignore. -- pass -+ -+ if conf.strict_module_state_transitions: -+ valid_states = ( -+ models.BUILD_STATES["init"], -+ models.BUILD_STATES["wait"], -+ models.BUILD_STATES["build"], -+ models.BUILD_STATES["failed"], -+ ) -+ if build.state not in valid_states: -+ log.error("Module failed handler called while module in state %r", build.state) -+ return - - if build.koji_tag: - builder = GenericBuilder.create_from_module(db_session, build, conf) -@@ -123,8 +132,15 @@ def done(msg_id, module_build_id, module_build_state): - "Note that retrieved module state %r doesn't match message module state %r", - build.state, module_build_state, - ) -- # This is ok.. it's a race condition we can ignore. -- pass -+ -+ if conf.strict_module_state_transitions: -+ valid_states = ( -+ models.BUILD_STATES["build"], -+ models.BUILD_STATES["done"], -+ ) -+ if build.state not in valid_states: -+ log.error("Module done handler called while module in state %r", build.state) -+ return - - # Scratch builds stay in 'done' state - if not build.scratch: -@@ -349,8 +365,15 @@ def wait(msg_id, module_build_id, module_build_state): - "Note that retrieved module state %r doesn't match message module state %r", - build.state, module_build_state, - ) -- # This is ok.. 
it's a race condition we can ignore. -- pass -+ -+ if conf.strict_module_state_transitions: -+ valid_states = ( -+ models.BUILD_STATES["init"], -+ models.BUILD_STATES["wait"], -+ ) -+ if build.state not in valid_states: -+ log.error("Module wait handler called while module in state %r", build.state) -+ return - - try: - build_deps = get_module_build_dependencies(build) - -From 57359dfd66c9f811265f53a549568329b9c2d58b Mon Sep 17 00:00:00 2001 -From: Mike McLean -Date: Jun 10 2021 17:47:28 +0000 -Subject: [PATCH 2/3] fix unit tests - - ---- - -diff --git a/tests/__init__.py b/tests/__init__.py -index ca188fa..c995d6b 100644 ---- a/tests/__init__.py -+++ b/tests/__init__.py -@@ -409,7 +409,7 @@ def _populate_data(data_size=10, contexts=False, scratch=False): - db_session.commit() - - --def scheduler_init_data(tangerine_state=None, scratch=False): -+def scheduler_init_data(tangerine_state=None, scratch=False, module_state="build"): - """ Creates a testmodule in the building state with all the components in the same batch - """ - clean_database() -@@ -421,7 +421,7 @@ def scheduler_init_data(tangerine_state=None, scratch=False): - name="testmodule", - stream="master", - version='20170109091357', -- state=BUILD_STATES["build"], -+ state=BUILD_STATES[module_state], - scratch=scratch, - build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", - runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", -diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py -index 809e3e0..104c49d 100644 ---- a/tests/test_build/test_build.py -+++ b/tests/test_build/test_build.py -@@ -1152,16 +1152,24 @@ class TestBuild(BaseTestBuild): - - from module_build_service.scheduler.db_session import db_session - -+ # module should be in wait state for this test -+ module_build = models.ModuleBuild.get_by_id(db_session, 3) -+ module_build.state = models.BUILD_STATES["wait"] -+ db_session.commit() -+ - # Create a dedicated database session for scheduler to avoid hang - self.run_scheduler( - msgs=[{ - "msg_id": "local module build", - "event": events.MBS_MODULE_STATE_CHANGE, -- "module_build_id": 3, -- "module_build_state": 1 -+ "module_build_id": module_build.id, -+ "module_build_state": module_build.state, - }] - ) - -+ # commit so that our assertions see the updates -+ db_session.commit() -+ - reused_component_ids = { - "module-build-macros": None, - "tangerine": 3, -@@ -1239,6 +1247,11 @@ class TestBuild(BaseTestBuild): - - FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb - -+ # module should be in wait state for this test -+ module_build = models.ModuleBuild.get_by_id(db_session, 3) -+ module_build.state = models.BUILD_STATES["wait"] -+ db_session.commit() -+ - self.run_scheduler( - msgs=[{ - "msg_id": "local module build", -@@ -1248,6 +1261,9 @@ class TestBuild(BaseTestBuild): - }] - ) - -+ # commit so that our assertions see the updates -+ db_session.commit() -+ - # All components should be built and module itself should be in "done" - # or "ready" state. 
-         for build in models.ModuleBuild.get_by_id(db_session, 3).component_builds:
-diff --git a/tests/test_scheduler/test_module_wait.py b/tests/test_scheduler/test_module_wait.py
-index 4a31d71..7257c7b 100644
---- a/tests/test_scheduler/test_module_wait.py
-+++ b/tests/test_scheduler/test_module_wait.py
-@@ -21,7 +21,7 @@ base_dir = os.path.dirname(os.path.dirname(__file__))
-
- class TestModuleWait:
-     def setup_method(self, test_method):
--        scheduler_init_data()
-+        scheduler_init_data(module_state="wait")
-
-         self.config = conf
-         self.session = mock.Mock()
-
-From 2283ca1760432148394693654c24f6c687c5b857 Mon Sep 17 00:00:00 2001
-From: Mike McLean
-Date: Jun 14 2021 20:29:15 +0000
-Subject: [PATCH 3/3] additional unit tests for strict_module_state_transitions
-
-
----
-
-diff --git a/tests/test_scheduler/test_module_states.py b/tests/test_scheduler/test_module_states.py
-new file mode 100644
-index 0000000..f9b6d0e
---- /dev/null
-+++ b/tests/test_scheduler/test_module_states.py
-@@ -0,0 +1,103 @@
-+# -*- coding: utf-8 -*-
-+# SPDX-License-Identifier: MIT
-+from __future__ import absolute_import
-+import os
-+
-+import mock
-+from mock import patch
-+import pytest
-+
-+from module_build_service.common import build_logs, conf, models
-+import module_build_service.resolver
-+from module_build_service.scheduler.db_session import db_session
-+import module_build_service.scheduler.handlers.modules
-+from tests import scheduler_init_data, clean_database
-+
-+base_dir = os.path.dirname(os.path.dirname(__file__))
-+
-+
-+class TestModuleStateChecks:
-+    def setup_method(self, test_method):
-+        clean_database()
-+        self.config = conf
-+        self.session = mock.Mock()
-+        conf.strict_module_state_transitions = True
-+
-+    def teardown_method(self, test_method):
-+        try:
-+            path = build_logs.path(db_session, 1)
-+            os.remove(path)
-+        except Exception:
-+            pass
-+
-+    @pytest.mark.parametrize(
-+        "bad_state",
-+        ["build", "done", "failed", "ready", "garbage"],
-+    )
-+    @patch("module_build_service.builder.GenericBuilder.create_from_module")
-+    def test_wait_state_validation(self, create_builder, bad_state):
-+        scheduler_init_data(module_state=bad_state)
-+        build = models.ModuleBuild.get_by_id(db_session, 2)
-+        # make sure we have the right build
-+        assert build.state == models.BUILD_STATES[bad_state]
-+        assert build.version == "20170109091357"
-+        with patch("module_build_service.resolver.GenericResolver.create"):
-+            module_build_service.scheduler.handlers.modules.wait(
-+                msg_id="msg-id-1",
-+                module_build_id=build.id,
-+                module_build_state=models.BUILD_STATES["wait"])
-+
-+        # the handler should exit early for these bad states
-+        create_builder.assert_not_called()
-+
-+        # build state should not be changed
-+        build = models.ModuleBuild.get_by_id(db_session, build.id)
-+        assert build.state == models.BUILD_STATES[bad_state]
-+
-+    @pytest.mark.parametrize(
-+        "bad_state",
-+        ["done", "ready", "garbage"],
-+    )
-+    @patch("module_build_service.builder.GenericBuilder.create_from_module")
-+    def test_failed_state_validation(self, create_builder, bad_state):
-+        scheduler_init_data(module_state=bad_state)
-+        build = models.ModuleBuild.get_by_id(db_session, 2)
-+        # make sure we have the right build
-+        assert build.state == models.BUILD_STATES[bad_state]
-+        assert build.version == "20170109091357"
-+        with patch("module_build_service.resolver.GenericResolver.create"):
-+            module_build_service.scheduler.handlers.modules.failed(
-+                msg_id="msg-id-1",
-+                module_build_id=build.id,
-+                module_build_state=models.BUILD_STATES["wait"])
-+
-+        # the handler should exit early for these bad states
-+        create_builder.assert_not_called()
-+
-+        # build state should not be changed
-+        build = models.ModuleBuild.get_by_id(db_session, build.id)
-+        assert build.state == models.BUILD_STATES[bad_state]
-+
-+    @pytest.mark.parametrize(
-+        "bad_state",
-+        ["init", "wait", "failed", "ready", "garbage"],
-+    )
-+    @patch("module_build_service.builder.GenericBuilder.clear_cache")
-+    def test_done_state_validation(self, clear_cache, bad_state):
-+        scheduler_init_data(module_state=bad_state)
-+        build = models.ModuleBuild.get_by_id(db_session, 2)
-+        # make sure we have the right build
-+        assert build.state == models.BUILD_STATES[bad_state]
-+        assert build.version == "20170109091357"
-+        with patch("module_build_service.resolver.GenericResolver.create"):
-+            module_build_service.scheduler.handlers.modules.done(
-+                msg_id="msg-id-1",
-+                module_build_id=build.id,
-+                module_build_state=models.BUILD_STATES["done"])
-+
-+        # the handler should exit early for these bad states
-+        clear_cache.assert_not_called()
-+
-+        # build state should not be changed
-+        build = models.ModuleBuild.get_by_id(db_session, build.id)
-+        assert build.state == models.BUILD_STATES[bad_state]
-
diff --git a/mbs-PR1718-nudge-reused-components.patch b/mbs-PR1718-nudge-reused-components.patch
deleted file mode 100644
index e4a54c3..0000000
--- a/mbs-PR1718-nudge-reused-components.patch
+++ /dev/null
@@ -1,55 +0,0 @@
-From a1494e66aa61334ad2bb9b79e8989a94c100b165 Mon Sep 17 00:00:00 2001
-From: Mike McLean
-Date: Jul 07 2021 22:22:24 +0000
-Subject: also nudge reused components after 10 minutes
-
-
----
-
-diff --git a/module_build_service/scheduler/producer.py b/module_build_service/scheduler/producer.py
-index bdf1ae0..b6f08f4 100644
---- a/module_build_service/scheduler/producer.py
-+++ b/module_build_service/scheduler/producer.py
-@@ -105,6 +105,7 @@ def fail_lost_builds():
-     # TODO re-use
-
-     if conf.system == "koji":
-+        ten_minutes = timedelta(minutes=10)
-         # We don't do this on behalf of users
-         koji_session = get_session(conf, login=False)
-         log.info("Querying tasks for statuses:")
-@@ -119,15 +120,17 @@ def fail_lost_builds():
-             if not component_build.task_id:
-                 continue
-
--            # Don't check tasks for components which have been reused,
--            # they may have BUILDING state temporarily before we tag them
--            # to new module tag. Checking them would be waste of resources.
-+            # For components which have been reused, wait 10 minutes before checking.
-+            # They may be in BUILDING state temporarily before we tag them
-+            # to new module tag. Checking them at that point would be waste of resources.
-             if component_build.reused_component_id:
--                log.debug(
--                    'Skipping check for task "%s", the component has been reused ("%s").',
--                    component_build.task_id, component_build.reused_component_id
--                )
--                continue
-+                now = datetime.utcnow()
-+                if (now - component_build.module_build.time_modified) < ten_minutes:
-+                    log.debug(
-+                        'Skipping check for task "%s", the component has been reused ("%s").',
-+                        component_build.task_id, component_build.reused_component_id
-+                    )
-+                    continue
-
-             task_id = component_build.task_id
-
-@@ -165,6 +168,7 @@
-                 build_name=component_build.package,
-                 build_release=build_release,
-                 build_version=build_version,
-+                module_build_id=component_build.module_build.id,
-             )
-
-         elif conf.system == "mock":
-
diff --git a/mbs-PR1729-dont-filter-runtime-deps.patch b/mbs-PR1729-dont-filter-runtime-deps.patch
deleted file mode 100644
index ae16681..0000000
--- a/mbs-PR1729-dont-filter-runtime-deps.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-From 85e5b9c738f4b415ec4d6aab2e273bbddb39e4a0 Mon Sep 17 00:00:00 2001
-From: Mike McLean
-Date: Aug 26 2021 19:54:10 +0000
-Subject: [PATCH 1/2] don't filter runtime deps for packagerV3 case
-
-
-Fixes: https://pagure.io/fm-orchestrator/issue/1714
-
----
-
-diff --git a/module_build_service/web/submit.py b/module_build_service/web/submit.py
-index c6b816f..b9b2dca 100644
---- a/module_build_service/web/submit.py
-+++ b/module_build_service/web/submit.py
-@@ -789,20 +789,6 @@ def process_module_context_configuration(stream_or_packager):
-             if not stream.is_static_context():
-                 stream.set_static_context()
-
--            # we get the dependenices of the stream
--            deps = stream.get_dependencies()
--            # with v3 packager format the output v2 stream will always have
--            # only one set of dependecies. We need to remove the platform
--            # virtual module from runtime dependencies as it is not desired.
--            modules = deps[0].get_runtime_modules()
--            module_streams = [(m, deps[0].get_runtime_streams(m)[0]) for m in modules
--                              if m not in conf.base_module_names]
--            deps[0].clear_runtime_dependencies()
--
--            for module_stream in module_streams:
--                module, stream = module_stream
--                deps[0].add_runtime_stream(module, stream)
--
-         return streams, static_context
-     else:
-         xmd = stream_or_packager.get_xmd()
-
-From 2f324d2ba15fbfe62b565299f3f77edf25b71d0c Mon Sep 17 00:00:00 2001
-From: Mike McLean
-Date: Aug 26 2021 21:07:23 +0000
-Subject: [PATCH 2/2] fix unit test
-
-
----
-
-diff --git a/tests/test_web/test_submit.py b/tests/test_web/test_submit.py
-index b5e3869..fb7aa57 100644
---- a/tests/test_web/test_submit.py
-+++ b/tests/test_web/test_submit.py
-@@ -120,9 +120,9 @@ class TestSubmit:
-         assert len(builds) == 2
-
-         expected_deps = {"CTX1": {"buildrequires": {"platform": ["f28"]},
--                                  "requires": {"nginx": ["1"]}},
-+                                  "requires": {"nginx": ["1"], "platform": ["f28"]}},
-                          "CTX2": {"buildrequires": {"platform": ["f29.2.0"]},
--                                  "requires": {}}}
-+                                  "requires": {"platform": ["f29"]}}}
-
-         for build in builds:
-             mmd = build.mmd()
-
diff --git a/mbs-PR1755-libmodulemd-workaround.patch b/mbs-PR1755-libmodulemd-workaround.patch
deleted file mode 100644
index 1611db6..0000000
--- a/mbs-PR1755-libmodulemd-workaround.patch
+++ /dev/null
@@ -1,289 +0,0 @@
-From e8d0874ca63d0ad02978d82741439841bc1a7905 Mon Sep 17 00:00:00 2001
-From: Brendan Reilly
-Date: Jul 13 2022 16:09:21 +0000
-Subject: Avoid libmodulemd symbol clash
-
-
-Importing dnf in the MBS process causes a symbol clash on RHEL 7. This
-is a temporary fix to avoid that, and should not be merged.
-
-See https://pagure.io/releng/issue/10850
-
----
-
-diff --git a/module_build_service/builder/MockModuleBuilder.py b/module_build_service/builder/MockModuleBuilder.py
-index df9a1b8..831f149 100644
---- a/module_build_service/builder/MockModuleBuilder.py
-+++ b/module_build_service/builder/MockModuleBuilder.py
-@@ -8,7 +8,6 @@ import re
- import subprocess
- import threading
-
--import dnf
- import koji
- import kobo.rpmlib
- import platform
-@@ -182,6 +181,10 @@ def get_local_releasever():
-     """
-     Returns the $releasever variable used in the system when expanding .repo files.
-     """
-+    # import dnf in function to avoid symbol name clashing
-+    # see: https://pagure.io/releng/issue/10850
-+    import dnf
-+
-     dnf_base = dnf.Base()
-     return dnf_base.conf.releasever
-
-@@ -196,6 +199,10 @@ def import_builds_from_local_dnf_repos(platform_id=None):
-     :param str platform_id: The `name:stream` of a fake platform module to generate in this
-         method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID.
-     """
-+    # import dnf in function to avoid symbol name clashing
-+    # see: https://pagure.io/releng/issue/10850
-+    import dnf
-+
-     log.info("Loading available RPM repositories.")
-     dnf_base = dnf.Base()
-     dnf_base.read_all_repos()
-diff --git a/module_build_service/scheduler/default_modules.py b/module_build_service/scheduler/default_modules.py
-index 6846d13..46d73f0 100644
---- a/module_build_service/scheduler/default_modules.py
-+++ b/module_build_service/scheduler/default_modules.py
-@@ -5,8 +5,8 @@ import errno
- import os
- import shutil
- import tempfile
-+import subprocess
-
--import dnf
- import kobo.rpmlib
- import koji
- import six.moves.xmlrpc_client as xmlrpclib
-@@ -330,91 +332,10 @@ def _get_rpms_from_tags(koji_session, tags, arches):
-
-     return nevras
-
--
- def _get_rpms_in_external_repo(repo_url, arches, cache_dir_name):
--    """
--    Get the available RPMs in the external repo for the provided arches.
--
--    :param str repo_url: the URL of the external repo with the "$arch" variable included
--    :param list arches: the list of arches to query the external repo for
--    :param str cache_dir_name: the cache directory name under f"{conf.cache_dir}/dnf"
--    :return: a set of the RPM NEVRAs
--    :rtype: set
--    :raise RuntimeError: if the cache is not writeable or the external repo couldn't be loaded
--    :raises ValueError: if there is no "$arch" variable in repo URL
--    """
--    if "$arch" not in repo_url:
--        raise ValueError(
--            "The external repo {} does not contain the $arch variable".format(repo_url)
--        )
--
--    base = dnf.Base()
--    try:
--        dnf_conf = base.conf
--        # Expire the metadata right away so that when a repo is loaded, it will always check to
--        # see if the external repo has been updated
--        dnf_conf.metadata_expire = 0
-+    # Calling an external script using subprocess to avoid importing dnf
-+    # See: https://pagure.io/releng/issue/10850
-+    nevras = subprocess.check_output(['mbs-get-rpms-in-external-repo', repo_url, cache_dir_name,
-+                                      conf.cache_dir, str(conf.dnf_timeout), str(conf.dnf_minrate)] + arches)
-
--        cache_location = os.path.join(conf.cache_dir, "dnf", cache_dir_name)
--        try:
--            # exist_ok=True can't be used in Python 2
--            os.makedirs(cache_location, mode=0o0770)
--        except OSError as e:
--            # Don't fail if the directories already exist
--            if e.errno != errno.EEXIST:
--                log.exception("Failed to create the cache directory %s", cache_location)
--                raise RuntimeError("The MBS cache is not writeable.")
--
--        # Tell DNF to use the cache directory
--        dnf_conf.cachedir = cache_location
--        # Don't skip repos that can't be synchronized
--        dnf_conf.skip_if_unavailable = False
--        dnf_conf.timeout = conf.dnf_timeout
--        # Get rid of everything to be sure it's a blank slate. This doesn't delete the cached repo
--        # data.
--        base.reset(repos=True, goal=True, sack=True)
--
--        # Add a separate repo for each architecture
--        for arch in arches:
--            # Convert arch to canon_arch. This handles cases where Koji "i686" arch is mapped to
--            # "i386" when generating RPM repository.
--            canon_arch = koji.canonArch(arch)
--            repo_name = "repo_{}".format(canon_arch)
--            repo_arch_url = repo_url.replace("$arch", canon_arch)
--            base.repos.add_new_repo(
--                repo_name, dnf_conf, baseurl=[repo_arch_url], minrate=conf.dnf_minrate,
--            )
--
--        try:
--            # Load the repos in parallel
--            base.update_cache()
--        except dnf.exceptions.RepoError:
--            msg = "Failed to load the external repos"
--            log.exception(msg)
--            raise RuntimeError(msg)
--
--        # dnf will not always raise an error on repo failures, so we check explicitly
--        for repo_name in base.repos:
--            if not base.repos[repo_name].metadata:
--                msg = "Failed to load metadata for repo %s" % repo_name
--                log.exception(msg)
--                raise RuntimeError(msg)
--
--        base.fill_sack(load_system_repo=False)
--
--        # Return all the available RPMs
--        nevras = set()
--        for rpm in base.sack.query().available():
--            rpm_dict = {
--                "arch": rpm.arch,
--                "epoch": rpm.epoch,
--                "name": rpm.name,
--                "release": rpm.release,
--                "version": rpm.version,
--            }
--            nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)
--            nevras.add(nevra)
--    finally:
--        base.close()
--
--    return nevras
-+    return set(nevras.split())
-diff --git a/module_build_service_workarounds/__init__.py b/module_build_service_workarounds/__init__.py
-new file mode 100644
-index 0000000..e69de29
---- /dev/null
-+++ b/module_build_service_workarounds/__init__.py
-diff --git a/module_build_service_workarounds/externalrepo.py b/module_build_service_workarounds/externalrepo.py
-new file mode 100755
-index 0000000..a3e234b
---- /dev/null
-+++ b/module_build_service_workarounds/externalrepo.py
-@@ -0,0 +1,107 @@
-+import os
-+import sys
-+assert "gi" not in sys.modules
-+import errno
-+import koji
-+import kobo.rpmlib
-+import dnf
-+
-+# Moved _get_rpms_in_external_repo logic to a separate script to avoid symbol clash with dnf
-+# See: https://pagure.io/releng/issue/10850
-+
-+def main(argv=sys.argv):
-+#def main(repo_url, cache_dir_name, cache_dir, dnf_timeout, dnf_minrate, arches):
-+    """
-+    Get the available RPMs in the external repo for the provided arches.
-+
-+    :param str repo_url: the URL of the external repo with the "$arch" variable included
-+    :param list arches: the list of arches to query the external repo for
-+    :param str cache_dir_name: the cache directory name under f"{conf.cache_dir}/dnf"
-+    :return: a set of the RPM NEVRAs
-+    :rtype: set
-+    :raise RuntimeError: if the cache is not writeable or the external repo couldn't be loaded
-+    :raises ValueError: if there is no "$arch" variable in repo URL
-+    """
-+
-+    repo_url = argv[1]
-+    cache_dir_name = argv[2]
-+    cache_dir = argv[3]
-+    dnf_timeout = argv[4]
-+    dnf_minrate = argv[5]
-+    arches = argv[6:]
-+
-+    if "$arch" not in repo_url:
-+        raise ValueError(
-+            "The external repo {} does not contain the $arch variable".format(repo_url)
-+        )
-+
-+    base = dnf.Base()
-+    try:
-+        dnf_conf = base.conf
-+        # Expire the metadata right away so that when a repo is loaded, it will always check to
-+        # see if the external repo has been updated
-+        dnf_conf.metadata_expire = 0
-+
-+        cache_location = os.path.join(cache_dir, "dnf", cache_dir_name)
-+        try:
-+            # exist_ok=True can't be used in Python 2
-+            os.makedirs(cache_location, mode=0o0770)
-+        except OSError as e:
-+            # Don't fail if the directories already exist
-+            if e.errno != errno.EEXIST:
-+                #log.exception("Failed to create the cache directory %s", cache_location)
-+                raise RuntimeError("The MBS cache is not writeable.")
-+
-+        # Tell DNF to use the cache directory
-+        dnf_conf.cachedir = cache_location
-+        # Don't skip repos that can't be synchronized
-+        dnf_conf.skip_if_unavailable = False
-+        dnf_conf.timeout = int(dnf_timeout)
-+        # Get rid of everything to be sure it's a blank slate. This doesn't delete the cached repo
-+        # data.
-+        base.reset(repos=True, goal=True, sack=True)
-+
-+        # Add a separate repo for each architecture
-+        for arch in arches:
-+            # Convert arch to canon_arch. This handles cases where Koji "i686" arch is mapped to
-+            # "i386" when generating RPM repository.
-+            canon_arch = koji.canonArch(arch)
-+            repo_name = "repo_{}".format(canon_arch)
-+            repo_arch_url = repo_url.replace("$arch", canon_arch)
-+            base.repos.add_new_repo(
-+                repo_name, dnf_conf, baseurl=[repo_arch_url], minrate=int(dnf_minrate),
-+            )
-+
-+        try:
-+            # Load the repos in parallel
-+            base.update_cache()
-+        except dnf.exceptions.RepoError:
-+            msg = "Failed to load the external repos"
-+            #log.exception(msg)
-+            raise RuntimeError(msg)
-+
-+        # dnf will not always raise an error on repo failures, so we check explicitly
-+        for repo_name in base.repos:
-+            if not base.repos[repo_name].metadata:
-+                msg = "Failed to load metadata for repo %s" % repo_name
-+                #log.exception(msg)
-+                raise RuntimeError(msg)
-+
-+        base.fill_sack(load_system_repo=False)
-+
-+        # Return all the available RPMs
-+        nevras = set()
-+        for rpm in base.sack.query().available():
-+            rpm_dict = {
-+                "arch": rpm.arch,
-+                "epoch": rpm.epoch,
-+                "name": rpm.name,
-+                "release": rpm.release,
-+                "version": rpm.version,
-+            }
-+            nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)
-+            nevras.add(nevra)
-+    finally:
-+        base.close()
-+
-+    print(" ".join(nevras))
-diff --git a/setup.py b/setup.py
-index 36b8c2e..21b889c 100644
---- a/setup.py
-+++ b/setup.py
-@@ -42,6 +42,7 @@ setup(
-             "mbs-upgradedb = module_build_service.manage:upgradedb",
-             "mbs-frontend = module_build_service.manage:run",
-             "mbs-manager = module_build_service.manage:manager_wrapper",
-+            "mbs-get-rpms-in-external-repo = module_build_service_workarounds.externalrepo:main"
-         ],
-         "moksha.consumer": "mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer",
-         "mbs.messaging_backends": [
-
diff --git a/mbs_fedora_click.patch b/mbs_fedora_click.patch
new file mode 100644
index 0000000..9947dae
--- /dev/null
+++ b/mbs_fedora_click.patch
@@ -0,0 +1,384 @@
+diff --git a/module_build_service/manage.py b/module_build_service/manage.py
+index 925b5031..1215bdba 100755
+--- a/module_build_service/manage.py
++++ b/module_build_service/manage.py
+@@ -5,10 +5,9 @@
+-from functools import wraps
++import click
+ import getpass
+ import logging
+ import os
+ import sys
+-import textwrap
+ 
+ import flask_migrate
+-from flask_script import Manager, prompt_bool
++from flask.cli import FlaskGroup
+ from werkzeug.datastructures import FileStorage
+@@ -21,74 +20,34 @@ from module_build_service.builder.MockModuleBuilder import (
+ from module_build_service.common.errors import StreamAmbigous, StreamNotXyz
+-from module_build_service.common.logger import level_flags
+ from module_build_service.common.utils import load_mmd_file, import_mmd
+ import module_build_service.scheduler.consumer
+ from module_build_service.scheduler.db_session import db_session
+ import module_build_service.scheduler.local
+ from module_build_service.web.submit import submit_module_build_from_yaml
+ 
+-
+-def create_app(debug=False, verbose=False, quiet=False):
+-    # logging (intended for flask-script, see manage.py)
+-    log = logging.getLogger(__name__)
+-    if debug:
+-        log.setLevel(level_flags["debug"])
+-    elif verbose:
+-        log.setLevel(level_flags["verbose"])
+-    elif quiet:
+-        log.setLevel(level_flags["quiet"])
+-
+-    return app
+-
+-
+-manager = Manager(create_app)
+-help_args = ("-?", "--help")
+-manager.help_args = help_args
+-migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
+-                              'migrations')
++migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations")
+ migrate = flask_migrate.Migrate(app, db, directory=migrations_dir)
+-manager.add_command("db", flask_migrate.MigrateCommand)
+-manager.add_option("-d", "--debug", dest="debug", action="store_true")
+-manager.add_option("-v", "--verbose", dest="verbose", action="store_true")
+-manager.add_option("-q", "--quiet", dest="quiet", action="store_true")
+ 
+ 
+-def console_script_help(f):
+-    @wraps(f)
+-    def wrapped(*args, **kwargs):
+-        import sys
++@click.group(cls=FlaskGroup, create_app=lambda *args, **kwargs: app)
++def cli():
++    """MBS manager"""
+ 
+-        if any([arg in help_args for arg in sys.argv[1:]]):
+-            command = os.path.basename(sys.argv[0])
+-            print(textwrap.dedent(
+-                """\
+-                {0}
+-
+-                Usage: {0} [{1}]
+-
+-                See also:
+-                mbs-manager(1)
+-                """).strip().format(command, "|".join(help_args))
+-            )
+-            sys.exit(2)
+-        r = f(*args, **kwargs)
+-        return r
+ 
+-    return wrapped
+-
+-
+-@console_script_help
+-@manager.command
++@cli.command("upgradedb")
+ def upgradedb():
+     """ Upgrades the database schema to the latest revision
+     """
+     app.config["SERVER_NAME"] = "localhost"
+-    # TODO: configurable?
+-    migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations")
+     with app.app_context():
+         flask_migrate.upgrade(directory=migrations_dir)
+ 
+ 
+-@manager.command
++def upgradedb_entrypoint():
++    """Entrypoint for command mbs-upgradedb"""
++    upgradedb()
++
++
++@cli.command("cleardb")
+ def cleardb():
+     """ Clears the database
+     """
+@@ -96,8 +53,8 @@ def cleardb():
+     models.ComponentBuild.query.delete()
+ 
+ 
+-@console_script_help
+-@manager.command
++@cli.command("import_module")
++@click.argument("mmd_file", type=click.Path(exists=True))
+ def import_module(mmd_file):
+     """ Imports the module from mmd_file
+     """
+@@ -117,41 +74,56 @@ def import_module(mmd_file):
+     return collected
+ 
+ 
+-@manager.option("--stream", action="store", dest="stream")
+-@manager.option("--file", action="store", dest="yaml_file")
+-@manager.option("--srpm", action="append", default=[], dest="srpms", metavar="SRPM")
+-@manager.option("--skiptests", action="store_true", dest="skiptests")
+-@manager.option("--offline", action="store_true", dest="offline")
+-@manager.option(
+-    '--buildrequires', action='append', metavar='name:stream',
+-    dest='buildrequires', default=[],
+-    help='Buildrequires to override in the form of "name:stream"'
+-)
+-@manager.option(
+-    '--requires', action='append', metavar='name:stream',
+-    dest='requires', default=[],
+-    help='Requires to override in the form of "name:stream"'
+-)
+-@manager.option("-d", "--debug", action="store_true", dest="log_debug")
+-@manager.option("-l", "--add-local-build", action="append", default=None, dest="local_build_nsvs")
+-@manager.option("-s", "--set-stream", action="append", default=[], dest="default_streams")
+-@manager.option(
+-    "-r", "--platform-repo-file", action="append", default=[], dest="platform_repofiles"
++@cli.command("build_module_locally")
++@click.option("--stream", metavar="STREAM")
++@click.option(
++    "--file", "yaml_file",
++    metavar="FILE",
++    required=True,
++    type=click.Path(exists=True),
++)
++@click.option("--srpm", "srpms", metavar="SRPM", multiple=True)
++@click.option("--skiptests", is_flag=True)
++@click.option("--offline", is_flag=True)
++@click.option(
++    '--buildrequires', "buildrequires", multiple=True,
++    metavar='name:stream', default=[],
++    help='Buildrequires to override in the form of "name:stream"'
++)
++@click.option(
++    '--requires', "requires", multiple=True,
++    metavar='name:stream', default=[],
++    help='Requires to override in the form of "name:stream"'
++)
++@click.option("-d", "--debug", "log_debug", is_flag=True)
++@click.option(
++    "-l", "--add-local-build", "local_build_nsvs",
++    metavar="NSV", multiple=True
++)
++@click.option(
++    "-s", "--set-stream", "default_streams",
++    metavar="STREAM", multiple=True
+ )
+-@manager.option("-p", "--platform-id", action="store", default=None, dest="platform_id")
++@click.option(
++    "-r", "--platform-repo-file", "platform_repofiles",
++    metavar="FILE",
++    type=click.Path(exists=True),
++    multiple=True
++)
++@click.option("-p", "--platform-id", metavar="PLATFORM_ID")
+ def build_module_locally(
+-    local_build_nsvs=None,
++    stream=None,
+     yaml_file=None,
+     srpms=None,
+-    stream=None,
+     skiptests=False,
+-    default_streams=None,
+     offline=False,
++    log_debug=False,
++    local_build_nsvs=None,
++    default_streams=None,
+     platform_repofiles=None,
+     platform_id=None,
+     requires=None,
+     buildrequires=None,
+-    log_debug=False,
+ ):
+     """ Performs local module build using Mock
+     """
+@@ -233,14 +193,11 @@ def build_module_locally(
+     sys.exit(1)
+ 
+ 
+-@manager.option(
+-    "identifier",
+-    metavar="NAME:STREAM[:VERSION[:CONTEXT]]",
+-    help="Identifier for selecting module builds to retire",
+-)
+-@manager.option(
++@cli.command("retire")
++@click.argument("identifier", metavar="NAME:STREAM[:VERSION[:CONTEXT]]")
++@click.option(
+     "--confirm",
+-    action="store_true",
++    is_flag=True,
+     default=False,
+     help="Perform retire operation without prompting",
+ )
+@@ -273,7 +244,8 @@ def retire(identifier, confirm=False):
+         logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))
+ 
+     # Prompt for confirmation
+-    is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))
++    confirm_msg = "Retire {} module builds?".format(len(module_builds))
++    is_confirmed = confirm or click.confirm(confirm_msg, abort=False)
+     if not is_confirmed:
+         logging.info("Module builds were NOT retired.")
+         return
+@@ -288,8 +260,10 @@ def retire(identifier, confirm=False):
+     logging.info("Module builds retired.")
+ 
+ 
+-@console_script_help
+-@manager.command
++@cli.command("run")
++@click.option("-h", "--host", metavar="HOST", help="Bind to this host.")
++@click.option("-p", "--port", metavar="PORT", help="Bind to this port along with --host.")
++@click.option("-d", "--debug", is_flag=True, default=False, help="Run frontend in debug mode.")
+ def run(host=None, port=None, debug=None):
+     """ Runs the Flask app, locally. Intended for dev instances, should not be used for production.
+ """ +@@ -302,9 +276,5 @@ def run(host=None, port=None, debug=None): + app.run(host=host, port=port, debug=debug) + + +-def manager_wrapper(): +- manager.run() +- +- + if __name__ == "__main__": +- manager_wrapper() ++ cli() +diff --git a/requirements.txt b/requirements.txt +index 9ea96f50..17ea4df0 100644 +--- a/requirements.txt ++++ b/requirements.txt +@@ -1,10 +1,10 @@ ++click + distro + dogpile.cache + enum34 + fedmsg + Flask + Flask-Migrate +-Flask-Script + Flask-SQLAlchemy + funcsigs # Python2 only + futures # Python 2 only +diff --git a/setup.py b/setup.py +index 7eac415c..09cd3b23 100644 +--- a/setup.py ++++ b/setup.py +@@ -39,9 +39,9 @@ setup( + dependency_links=deps_links, + entry_points={ + "console_scripts": [ +- "mbs-upgradedb = module_build_service.manage:upgradedb", ++ "mbs-upgradedb = module_build_service.manage:upgradedb_entrypoint", + "mbs-frontend = module_build_service.manage:run", +- "mbs-manager = module_build_service.manage:manager_wrapper", ++ "mbs-manager = module_build_service.manage:cli", + ], + "moksha.consumer": "mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer", + "mbs.messaging_backends": [ +diff --git a/tests/test_manage.py b/tests/test_manage.py +index 6d3026c8..44ab8638 100644 +--- a/tests/test_manage.py ++++ b/tests/test_manage.py +@@ -5,10 +5,9 @@ from __future__ import absolute_import + from mock import patch + import pytest + +-from module_build_service import app ++from module_build_service import app, manage as mbs_manager + from module_build_service.common import models + from module_build_service.common.models import BUILD_STATES, ModuleBuild +-from module_build_service.manage import manager_wrapper, retire + from module_build_service.scheduler.db_session import db_session + from module_build_service.web.utils import deps_to_dict + from tests import clean_database, staged_data_filename +@@ -30,10 +29,12 @@ class TestMBSManage: + ) + def test_retire_identifier_validation(self, identifier, is_valid): + if is_valid: +- retire(identifier) ++ with pytest.raises(SystemExit) as exc_info: ++ mbs_manager.cli(["retire", identifier]) ++ assert 0 == exc_info + else: + with pytest.raises(ValueError): +- retire(identifier) ++ mbs_manager.cli(["retire", identifier]) + + @pytest.mark.parametrize( + ("overrides", "identifier", "changed_count"), +@@ -47,9 +48,9 @@ class TestMBSManage: + ({"context": "pickme"}, "spam:eggs:ham", 2), + ), + ) +- @patch("module_build_service.manage.prompt_bool") +- def test_retire_build(self, prompt_bool, overrides, identifier, changed_count): +- prompt_bool.return_value = True ++ @patch("click.confirm") ++ def test_retire_build(self, confirm, overrides, identifier, changed_count): ++ confirm.return_value = True + + module_builds = ( + db_session.query(ModuleBuild) +@@ -71,7 +72,10 @@ class TestMBSManage: + + db_session.commit() + +- retire(identifier) ++ with pytest.raises(SystemExit) as exc_info: ++ mbs_manager.cli(["retire", identifier]) ++ assert 0 == exc_info.value ++ + retired_module_builds = ( + db_session.query(ModuleBuild) + .filter_by(state=BUILD_STATES["garbage"]) +@@ -93,11 +97,11 @@ class TestMBSManage: + (False, True, True) + ), + ) +- @patch("module_build_service.manage.prompt_bool") ++ @patch("click.confirm") + def test_retire_build_confirm_prompt( +- self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected ++ self, confirm, confirm_prompt, confirm_arg, confirm_expected + ): +- prompt_bool.return_value = confirm_prompt ++ confirm.return_value = confirm_prompt + + module_builds = 
db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all() + # Verify our assumption of the amount of ModuleBuilds in database +@@ -106,15 +110,17 @@ class TestMBSManage: + for x, build in enumerate(module_builds): + build.name = "spam" + str(x) if x > 0 else "spam" + build.stream = "eggs" +- + db_session.commit() + +- retire("spam:eggs", confirm_arg) ++ cmd = ["retire", "spam:eggs"] + (["--confirm"] if confirm_arg else []) ++ with pytest.raises(SystemExit) as exc_info: ++ mbs_manager.cli(cmd) ++ assert 0 == exc_info.value ++ ++ expected_changed_count = 1 if confirm_expected else 0 + retired_module_builds = ( + db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all() + ) +- +- expected_changed_count = 1 if confirm_expected else 0 + assert len(retired_module_builds) == expected_changed_count + + +@@ -156,7 +162,7 @@ class TestCommandBuildModuleLocally: + original_db_uri = app.config["SQLALCHEMY_DATABASE_URI"] + try: + with patch("sys.argv", new=cli_cmd): +- manager_wrapper() ++ mbs_manager.cli() + finally: + app.config["SQLALCHEMY_DATABASE_URI"] = original_db_uri + + \ No newline at end of file diff --git a/module-build-service.spec b/module-build-service.spec index c2e0a7c..47fec6b 100644 --- a/module-build-service.spec +++ b/module-build-service.spec @@ -1,55 +1,195 @@ +%if 0%{?fedora} || ( 0%{?rhel} && 0%{?rhel} >= 8 ) +# Not all python modules are built with Python3 in EPEL +%global with_python3 1 +%endif + Name: module-build-service -Version: 3.6.1 -Release: 6%{?dist} +Version: 3.8.0 +Release: 1%{?dist} Summary: The Module Build Service for Modularity License: MIT URL: https://pagure.io/fm-orchestrator Source0: https://files.pythonhosted.org/packages/source/m/%{name}/%{name}-%{version}.tar.gz -Patch0: mbs-PR1709-fix-scratch-build-suffix.patch -Patch1: mbs-PR1711-enforce-sane-module-state-transitions.patch -Patch2: mbs-PR1718-nudge-reused-components.patch -Patch3: mbs-PR1729-dont-filter-runtime-deps.patch -Patch4: mbs-PR1755-libmodulemd-workaround.patch +Patch0: mbs_fedora_click.patch BuildArch: noarch + +%if 0%{?rhel} ExclusiveArch: %{ix86} x86_64 noarch +%endif + +%if 0%{?with_python3} + +BuildRequires: python3-devel +BuildRequires: python3-setuptools +BuildRequires: python3-m2crypto +BuildRequires: python3-munch +BuildRequires: python3-funcsigs +BuildRequires: python3-solv +BuildRequires: python3-libmodulemd +BuildRequires: python3-openidc-client +BuildRequires: python3-ldap3 +BuildRequires: python3-koji +BuildRequires: python3-setuptools +BuildRequires: python3-click +BuildRequires: python3-flask-sqlalchemy +BuildRequires: python3-flask-migrate +BuildRequires: python3-six +BuildRequires: python3-flask +BuildRequires: python3-dogpile-cache +BuildRequires: python3-requests +BuildRequires: python3-pyOpenSSL +BuildRequires: python3-sqlalchemy +BuildRequires: python3-moksha-hub +BuildRequires: python3-kobo +BuildRequires: python3-kobo-rpmlib +BuildRequires: python3-fedmsg +BuildRequires: python3-pungi +BuildRequires: python3-prometheus_client +BuildRequires: python3-dnf +BuildRequires: python3-celery + +%else BuildRequires: python2-devel -BuildRequires: python-setuptools -Requires: git-core -Requires: gobject-introspection -Requires: kobo -Requires: kobo-rpmlib -# we need the fix from Koji PR#1653 from version 1.19.0 -Requires: koji >= 1.19.0 -Requires: python2-libmodulemd2 -Requires: python2-fedmsg +BuildRequires: m2crypto +BuildRequires: python-flask-script +BuildRequires: python-m2ext +BuildRequires: python-munch +BuildRequires: python2-funcsigs 
+BuildRequires: python2-solv
+BuildRequires: python-openidc-client
+BuildRequires: python-ldap3
+BuildRequires: python-enum34
+BuildRequires: python2-koji
+BuildRequires: python2-fedmsg
+BuildRequires: python2-prometheus_client
+BuildRequires: python2-dnf
+BuildRequires: python2-celery
+
+%if 0%{?rhel} && 0%{?rhel} <= 7
+BuildRequires: python-setuptools
+BuildRequires: python-flask-sqlalchemy
+BuildRequires: python-flask-migrate
+BuildRequires: python-six
+BuildRequires: pyOpenSSL
+BuildRequires: python-sqlalchemy
+BuildRequires: python-moksha-hub
+BuildRequires: python-futures
+BuildRequires: python-flask
+BuildRequires: python-dogpile-cache
+BuildRequires: python-backports-ssl_match_hostname
+BuildRequires: python-requests
+# On RHEL7 python2-libmodulemd is packaged as python2-libmodulemd2
+BuildRequires: python2-libmodulemd2
+%else
+BuildRequires: python2-setuptools
+BuildRequires: python2-flask-sqlalchemy
+BuildRequires: python2-flask-migrate
+BuildRequires: python2-six
+BuildRequires: python2-futures
+BuildRequires: python2-flask
+BuildRequires: python2-dogpile-cache
+BuildRequires: python2-requests
+BuildRequires: python2-pyOpenSSL
+BuildRequires: python2-sqlalchemy
+BuildRequires: python2-moksha-hub
+BuildRequires: python2-m2crypto
+BuildRequires: python2-kobo
+BuildRequires: python2-kobo-rpmlib
+BuildRequires: python2-libmodulemd
+# python2-pungi is not available in EPEL and can't be a BuildRequire for RHEL
+BuildRequires: python2-pungi
+%endif
+
+%endif
+
+BuildRequires: git-core
+BuildRequires: help2man
+BuildRequires: mock
+BuildRequires: rpm-build
+
+%if 0%{?with_python3}
+Requires: python3-click
+Requires: python3-munch
+Requires: python3-funcsigs
+Requires: python3-openidc-client
+Requires: python3-ldap3
+Requires: python3-libmodulemd
+Requires: python3-solv
+Requires: python3-koji
+Requires: python3-flask-sqlalchemy
+Requires: python3-flask-migrate
+Requires: python3-six
+Requires: python3-pungi
+Requires: python3-sqlalchemy
+Requires: python3-moksha-hub
+Requires: python3-m2crypto
+Requires: python3-kobo
+Requires: python3-kobo-rpmlib
+Requires: python3-flask
+Requires: python3-dogpile-cache
+Requires: python3-requests
+Requires: python3-pyOpenSSL
+Requires: python3-fedmsg
+Requires: python3-prometheus_client
+Requires: python3-dnf
+Requires: python3-celery
+%else
+Requires: python-flask-script
+Requires: python-munch
 Requires: python2-funcsigs
+Requires: python-enum34
+Requires: python-openidc-client
+Requires: python-ldap3
+Requires: python2-solv
 Requires: python2-koji
-Requires: python2-prometheus_client
+Requires: python2-fedmsg
 Requires: python2-pungi
-Requires: python2-solv
+Requires: python2-prometheus_client
+Requires: python2-dnf
 Requires: python2-celery
-Requires: python-backports-ssl_match_hostname
-Requires: python-dogpile-cache
-Requires: python-enum34
-Requires: python-flask
-Requires: python-flask-migrate
-Requires: python-flask-script
+
+%if 0%{?rhel} && 0%{?rhel} <= 7
 Requires: python-flask-sqlalchemy
-Requires: python-futures
-Requires: python-gobject-base
-Requires: python-ldap3
-Requires: python-moksha-hub
-Requires: python-munch
-Requires: python-openidc-client
-Requires: python-requests
+Requires: python-flask-migrate
 Requires: python-six
 Requires: python-sqlalchemy
+Requires: python-moksha-hub
+Requires: python-futures
+Requires: python-flask
+Requires: python-dogpile-cache
+Requires: python-backports-ssl_match_hostname
+Requires: python-requests
+# On RHEL7 python2-libmodulemd is packaged as python2-libmodulemd2
+Requires: python2-libmodulemd2
+%else
+Requires: python2-flask-sqlalchemy
+Requires: python2-flask-migrate
+Requires: python2-six
+Requires: python2-futures
+Requires: python2-flask
+Requires: python2-dogpile-cache
+Requires: python2-requests
+Requires: python2-pyOpenSSL
+Requires: python2-sqlalchemy
+Requires: python2-moksha-hub
+Requires: python2-m2crypto
+Requires: python2-kobo
+Requires: python2-kobo-rpmlib
+Requires: python2-libmodulemd
+%endif
+
+%endif
+
+Requires: fedpkg
+Requires: git-core
+Requires: mock
 Requires: rpm-build
-Requires: python2-dnf
+# https://bugzilla.redhat.com/show_bug.cgi?id=1466792
+Requires: mock-scm
 
 
 %description
@@ -70,41 +210,86 @@ for a number of tasks:
 
 %prep
 %setup -q
 %patch0 -p1
-%patch1 -p1
-%patch2 -p1
-%patch3 -p1
-%patch4 -p1
+
 # Workaround because python2-koji has no egg-info file at the momement
 sed -i '/koji/d' requirements.txt
+# Remove Python 2 only dependencies
 sed -i '/futures/d' requirements.txt
 sed -i '/enum34/d' requirements.txt
 
 
 %build
+%if 0%{?with_python3}
+%py3_build
+%else
 %py2_build
+%endif
 
 
 %install
+%if 0%{?with_python3}
+%py3_install
+%else
 %py2_install
+%endif
+
+%if 0%{?with_python3}
+export PYTHONPATH=%{buildroot}%{python3_sitelib}
+%else
+export PYTHONPATH=%{buildroot}%{python2_sitelib}
+%endif
+
+# The version of kobo required is not in RHEL/EPEL, so these commands will fail
+%if 0%{?fedora}
+mkdir -p %{buildroot}/%{_mandir}/man1
+for command in mbs-manager mbs-frontend mbs-upgradedb ; do
+    %{buildroot}/%{_bindir}/$command --help
+    help2man -N --version-string=%{version} \
+        %{buildroot}/%{_bindir}/$command > \
+        %{buildroot}/%{_mandir}/man1/$command.1
+done
+%endif
 
 
 %files
 %doc README.rst
 %license LICENSE
+
+%if 0%{?with_python3}
+%{python3_sitelib}/module_build_service*
+%else
 %{python2_sitelib}/module_build_service*
+%endif
+
 %{_bindir}/mbs-*
 %dir %{_sysconfdir}/module-build-service
 %config(noreplace) %{_sysconfdir}/module-build-service/koji.conf
-%config(noreplace) %{_sysconfdir}/module-build-service/cacert.pem
 %config(noreplace) %{_sysconfdir}/module-build-service/mock.cfg
 %config(noreplace) %{_sysconfdir}/module-build-service/yum.conf
 %config(noreplace) %{_sysconfdir}/fedmsg.d/mbs-scheduler.py
 %config(noreplace) %{_sysconfdir}/fedmsg.d/mbs-logging.py
 %config(noreplace) %{_sysconfdir}/fedmsg.d/module_build_service.py
-%exclude %{_sysconfdir}/fedmsg.d/*.py[co]
+
+%if 0%{?with_python3}
+%exclude %{python3_sitelib}/conf/
+%exclude %{python3_sitelib}/tests/
+%else
 %exclude %{python2_sitelib}/conf/
 %exclude %{python2_sitelib}/tests/
+%exclude %{_sysconfdir}/fedmsg.d/*.py[co]
+%endif
+%if 0%{?fedora}
+%{_mandir}/man1/mbs-*.1*
+%endif
 
 
 %changelog
+* Wed Feb 15 2023 Diego Herrera - 3.8.0-1
+- Bring changes from main branch
+- EPEL8+ uses python3
+- Fix libmodulemd dependency on RHEL7
+- Exclude precompiled python files from config on python2
+
 * Fri Jul 13 2022 Brendan Reilly - 3.6.1-6
 - Add patch to workaround libmodulemd issue
diff --git a/sources b/sources
index 4fb2d3f..5c3d41a 100644
--- a/sources
+++ b/sources
@@ -1 +1 @@
-SHA512 (module-build-service-3.6.1.tar.gz) = d0a714ab763d37cad067770114801968f33ee3db198acede08412366f85ebba46d220d74a29010db544a89b38fca525a7550f79e9a2a0ff3dc56fe0a7e7369e1
+SHA512 (module-build-service-3.8.0.tar.gz) = 3d5d822190fd2ba98156a38f7b84f96fd17a0f381b273512b71915f0513847c88300853ea6ce3f03fc78da001d20449130d72344b56768da23da01635d65d4d2
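
For reviewers unfamiliar with the Flask-Script to Click migration that mbs_fedora_click.patch performs, the pattern is condensed below. This is a minimal illustrative sketch, not code from MBS: the "hello" command and its options are hypothetical, while the FlaskGroup wiring and the click.confirm call mirror what the patch does in module_build_service/manage.py.

# Sketch of the Flask-Script -> Click migration pattern used by mbs_fedora_click.patch.
# Only the FlaskGroup wiring and click.confirm usage mirror the patch; the "hello"
# command, its options, and this module itself are hypothetical.
import click
from flask import Flask
from flask.cli import FlaskGroup

app = Flask(__name__)


# Old style, removed by the patch:
#     from flask_script import Manager, prompt_bool
#     manager = Manager(app)
#     @manager.option("--name", action="store", dest="name")
#     def hello(name): ...
#     manager.run()

# New style: the group pushes a Flask app context around every command.
@click.group(cls=FlaskGroup, create_app=lambda *args, **kwargs: app)
def cli():
    """Example manager CLI."""


@cli.command("hello")
@click.option("--name", metavar="NAME", default="world")
@click.option("--confirm", is_flag=True, default=False)
def hello(name, confirm):
    # click.confirm stands in for flask_script.prompt_bool, as in the retire command
    if confirm or click.confirm("Greet {}?".format(name), abort=False):
        click.echo("Hello, {}".format(name))


if __name__ == "__main__":
    cli()

One behavioral consequence is visible in the reworked tests/test_manage.py above: invoking a Click group such as cli([...]) ends in SystemExit even on success, which is why each mbs_manager.cli(...) call in the tests is wrapped in pytest.raises(SystemExit).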