Blob Blame History Raw
From 389f32964e1050bd7bae7365797febf373b85055 Mon Sep 17 00:00:00 2001
From: "Owen W. Taylor" <otaylor@fishsoup.net>
Date: Thu, 19 Jul 2018 12:59:17 -0400
Subject: [PATCH 2/3] Clean up platform handling and restrict platforms by
 enabled clusters #1046

https://github.com/projectatomic/atomic-reactor/pull/1046

Don't always override platforms for scratch and isolated builds

A minor logic error resulted in every scratch and isolated build being treated
as an --arch-override build.

Add util.get_platforms() function

Add a utility function to get the list of platforms - this consolidates
some slightly different handling between different plugins. get_platforms()
returns the result of check_and_set_platforms, or if that was not run,
the 'platforms' parameter of the orchestrate_build plugin.

Assume that check_and_set_platforms is always run for orchestrated builds

It's difficult to have consistent handling of platforms when check_and_set_platforms
is sometimes run, and sometimes not. It is currently not run in the no-koji case.
Handle the no-koji case and remove fallback handling.

Skip platforms that aren't found in the cluster configuration

If there is a platform from the koji target (or from the osbs command
line) that isn't found in the reactor config map cluster configuration,
skip with a warning rather than erroring out. This is useful when the
koji target is being used for the package set, but has an architecture
list that's different than the set of available workers.
---
 atomic_reactor/plugins/build_orchestrate_build.py  |  19 +--
 atomic_reactor/plugins/post_pulp_pull.py           |   7 +-
 atomic_reactor/plugins/pre_add_filesystem.py       |  16 +-
 .../plugins/pre_check_and_set_platforms.py         |  55 ++++---
 atomic_reactor/plugins/pre_pull_base_image.py      |  13 +-
 atomic_reactor/plugins/pre_resolve_composes.py     |  21 +--
 atomic_reactor/util.py                             |  13 +-
 tests/plugins/test_add_filesystem.py               |  63 ++------
 tests/plugins/test_check_and_set_platforms.py      | 152 +++++++++++++++----
 tests/plugins/test_orchestrate_build.py            | 166 +++------------------
 tests/plugins/test_pull_base_image.py              |  14 --
 tests/plugins/test_resolve_composes.py             |  12 +-
 12 files changed, 228 insertions(+), 323 deletions(-)

diff --git a/atomic_reactor/plugins/build_orchestrate_build.py b/atomic_reactor/plugins/build_orchestrate_build.py
index 1645424..27c4535 100644
--- a/atomic_reactor/plugins/build_orchestrate_build.py
+++ b/atomic_reactor/plugins/build_orchestrate_build.py
@@ -38,10 +38,9 @@ from atomic_reactor.plugins.pre_reactor_config import (get_config,
                                                        get_build_image_override,
                                                        get_goarch_to_platform_mapping)
 from atomic_reactor.plugins.pre_check_and_set_rebuild import is_rebuild
-from atomic_reactor.util import (df_parser, get_build_json, get_manifest_list, ImageName,
-                                 get_platforms_in_limits)
-from atomic_reactor.constants import (PLUGIN_ADD_FILESYSTEM_KEY, PLUGIN_BUILD_ORCHESTRATE_KEY,
-                                      PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
+from atomic_reactor.util import (df_parser, get_build_json, get_manifest_list, get_platforms,
+                                 ImageName)
+from atomic_reactor.constants import (PLUGIN_ADD_FILESYSTEM_KEY, PLUGIN_BUILD_ORCHESTRATE_KEY)
 from osbs.api import OSBS
 from osbs.exceptions import OsbsException
 from osbs.conf import Configuration
@@ -271,6 +270,7 @@ class OrchestrateBuildPlugin(BuildStepPlugin):
         :param workflow: DockerBuildWorkflow instance
         :param build_kwargs: dict, keyword arguments for starting worker builds
         :param platforms: list<str>, platforms to build
+                          (used via util.get_orchestrator_platforms())
         :param osbs_client_config: str, path to directory containing osbs.conf
         :param worker_build_image: str, the builder image to use for worker builds
                                   (not used, image is inherited from the orchestrator)
@@ -282,7 +282,7 @@ class OrchestrateBuildPlugin(BuildStepPlugin):
         :param goarch: dict, keys are platform, values are go language platform names
         """
         super(OrchestrateBuildPlugin, self).__init__(tasker, workflow)
-        self.platforms = self.get_platforms(platforms)
+        self.platforms = get_platforms(self.workflow)
 
         self.build_kwargs = build_kwargs
         self.osbs_client_config_fallback = osbs_client_config
@@ -420,15 +420,6 @@ class OrchestrateBuildPlugin(BuildStepPlugin):
         self.build_kwargs['arrangement_version'] =\
             get_arrangement_version(self.workflow, self.build_kwargs['arrangement_version'])
 
-    def get_platforms(self, fallback):
-        koji_platforms = self.workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
-        if koji_platforms:
-            return koji_platforms
-
-        # if check_and_set_platforms didn't run, or didn't get any platforms from koji
-        # determine platforms from USER_PARAMS platforms parameter
-        return get_platforms_in_limits(self.workflow, fallback)
-
     def get_current_builds(self, osbs):
         field_selector = ','.join(['status!={status}'.format(status=status.capitalize())
                                    for status in BUILD_FINISHED_STATES])
diff --git a/atomic_reactor/plugins/post_pulp_pull.py b/atomic_reactor/plugins/post_pulp_pull.py
index b3d396c..10b6a08 100644
--- a/atomic_reactor/plugins/post_pulp_pull.py
+++ b/atomic_reactor/plugins/post_pulp_pull.py
@@ -17,14 +17,13 @@ from __future__ import unicode_literals
 
 from atomic_reactor.constants import (PLUGIN_PULP_PUSH_KEY, PLUGIN_PULP_SYNC_KEY,
                                       PLUGIN_GROUP_MANIFESTS_KEY,
-                                      PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
                                       MEDIA_TYPE_DOCKER_V1, MEDIA_TYPE_DOCKER_V2_SCHEMA1,
                                       MEDIA_TYPE_DOCKER_V2_SCHEMA2,
                                       MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST)
 
 from atomic_reactor.plugin import PostBuildPlugin, ExitPlugin
 from atomic_reactor.plugins.exit_remove_built_image import defer_removal
-from atomic_reactor.util import get_manifest_digests
+from atomic_reactor.util import get_manifest_digests, get_platforms
 from atomic_reactor.plugins.pre_reactor_config import (get_prefer_schema1_digest,
                                                        get_platform_to_goarch_mapping)
 import requests
@@ -189,10 +188,10 @@ class PulpPullPlugin(ExitPlugin, PostBuildPlugin):
         if self.workflow.postbuild_results.get(PLUGIN_GROUP_MANIFESTS_KEY):
             self.expect_v2schema2list = True
 
-            platforms = self.workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
+            platforms = get_platforms(self.workflow)
             if not platforms:
                 self.log.debug('Cannot check if only manifest list digest should be checked '
-                               'because %s plugin did not run', PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
+                               'because we have no platforms list')
                 return
 
             try:
diff --git a/atomic_reactor/plugins/pre_add_filesystem.py b/atomic_reactor/plugins/pre_add_filesystem.py
index 9992a64..60eeda0 100644
--- a/atomic_reactor/plugins/pre_add_filesystem.py
+++ b/atomic_reactor/plugins/pre_add_filesystem.py
@@ -26,13 +26,12 @@ import json
 import re
 import os
 
-from atomic_reactor.constants import (DEFAULT_DOWNLOAD_BLOCK_SIZE, PLUGIN_ADD_FILESYSTEM_KEY,
-                                      PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
+from atomic_reactor.constants import (DEFAULT_DOWNLOAD_BLOCK_SIZE, PLUGIN_ADD_FILESYSTEM_KEY)
 from atomic_reactor.plugin import PreBuildPlugin, BuildCanceledException
 from atomic_reactor.plugins.exit_remove_built_image import defer_removal
 from atomic_reactor.plugins.pre_reactor_config import get_koji_session
 from atomic_reactor.koji_util import TaskWatcher, stream_task_output
-from atomic_reactor.util import get_retrying_requests_session
+from atomic_reactor.util import get_platforms, get_retrying_requests_session
 from atomic_reactor import util
 
 
@@ -101,7 +100,7 @@ class AddFilesystemPlugin(PreBuildPlugin):
                       base filesystem creation. First value will also
                       be used as install_tree. Only baseurl value is used
                       from each repo file.
-        :param architectures: list<str>, list of arches to build on (orchestrator)
+        :param architectures: list<str>, list of arches to build on (orchestrator) - UNUSED
         :param architecture: str, arch to build on (worker)
         """
         # call parent constructor
@@ -121,18 +120,11 @@ class AddFilesystemPlugin(PreBuildPlugin):
         self.poll_interval = poll_interval
         self.blocksize = blocksize
         self.repos = repos or []
-        self.architectures = self.get_arches(architectures)
+        self.architectures = get_platforms(self.workflow)
         self.is_orchestrator = True if self.architectures else False
         self.architecture = architecture
         self.scratch = util.is_scratch_build()
 
-    def get_arches(self, fallback):
-        architectures = self.workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
-        if architectures:
-            return list(architectures)
-
-        return fallback
-
     def is_image_build_type(self, base_image):
         return base_image.strip().lower() == 'koji/image-build'
 
diff --git a/atomic_reactor/plugins/pre_check_and_set_platforms.py b/atomic_reactor/plugins/pre_check_and_set_platforms.py
index a819eb1..46aafc0 100644
--- a/atomic_reactor/plugins/pre_check_and_set_platforms.py
+++ b/atomic_reactor/plugins/pre_check_and_set_platforms.py
@@ -17,7 +17,7 @@ when koji build tags change.
 from atomic_reactor.plugin import PreBuildPlugin
 from atomic_reactor.util import (get_platforms_in_limits, is_scratch_build, is_isolated_build,
                                  get_orchestrator_platforms)
-from atomic_reactor.plugins.pre_reactor_config import get_koji_session, NO_FALLBACK
+from atomic_reactor.plugins.pre_reactor_config import get_config, get_koji_session, NO_FALLBACK
 from atomic_reactor.constants import PLUGIN_CHECK_AND_SET_PLATFORMS_KEY
 
 
@@ -25,7 +25,7 @@ class CheckAndSetPlatformsPlugin(PreBuildPlugin):
     key = PLUGIN_CHECK_AND_SET_PLATFORMS_KEY
     is_allowed_to_fail = False
 
-    def __init__(self, tasker, workflow, koji_target):
+    def __init__(self, tasker, workflow, koji_target=None):
 
         """
         constructor
@@ -37,28 +37,45 @@ class CheckAndSetPlatformsPlugin(PreBuildPlugin):
         # call parent constructor
         super(CheckAndSetPlatformsPlugin, self).__init__(tasker, workflow)
         self.koji_target = koji_target
+        self.reactor_config = get_config(self.workflow)
 
     def run(self):
         """
         run the plugin
         """
-        koji_session = get_koji_session(self.workflow, NO_FALLBACK)
-        self.log.info("Checking koji target for platforms")
-        event_id = koji_session.getLastEvent()['id']
-        target_info = koji_session.getBuildTarget(self.koji_target, event=event_id)
-        build_tag = target_info['build_tag']
-        koji_build_conf = koji_session.getBuildConfig(build_tag, event=event_id)
-        koji_platforms = koji_build_conf['arches']
-        if not koji_platforms:
-            self.log.info("No platforms found in koji target")
+        if self.koji_target:
+            koji_session = get_koji_session(self.workflow, NO_FALLBACK)
+            self.log.info("Checking koji target for platforms")
+            event_id = koji_session.getLastEvent()['id']
+            target_info = koji_session.getBuildTarget(self.koji_target, event=event_id)
+            build_tag = target_info['build_tag']
+            koji_build_conf = koji_session.getBuildConfig(build_tag, event=event_id)
+            koji_platforms = koji_build_conf['arches']
+            if not koji_platforms:
+                self.log.info("No platforms found in koji target")
+                return None
+            platforms = koji_platforms.split()
+
+            if is_scratch_build() or is_isolated_build():
+                override_platforms = get_orchestrator_platforms(self.workflow)
+                if override_platforms and set(override_platforms) != set(platforms):
+                    # platforms from user params do not match platforms from koji target
+                    # that almost certainly means they were overridden and should be used
+                    return set(override_platforms)
+        else:
+            platforms = get_orchestrator_platforms(self.workflow)
+
+        if not platforms:
+            # Not an orchestrated build
             return None
-        platforms = koji_platforms.split()
 
-        if is_scratch_build() or is_isolated_build():
-            override_platforms = get_orchestrator_platforms(self.workflow)
-            if override_platforms and set(override_platforms) != koji_platforms:
-                # platforms from user params do not match platforms from koji target
-                # that almost certainly means they were overridden and should be used
-                return set(override_platforms)
+        # Filter platforms based on clusters
+        enabled_platforms = []
+        for p in platforms:
+            if self.reactor_config.get_enabled_clusters_for_platform(p):
+                enabled_platforms.append(p)
+            else:
+                self.log.warning(
+                    "No cluster found for platform '%s' in reactor config map, skipping", p)
 
-        return get_platforms_in_limits(self.workflow, platforms)
+        return get_platforms_in_limits(self.workflow, enabled_platforms)
diff --git a/atomic_reactor/plugins/pre_pull_base_image.py b/atomic_reactor/plugins/pre_pull_base_image.py
index 52b37c5..c13f94b 100644
--- a/atomic_reactor/plugins/pre_pull_base_image.py
+++ b/atomic_reactor/plugins/pre_pull_base_image.py
@@ -22,8 +22,7 @@ import platform
 from atomic_reactor.plugin import PreBuildPlugin
 from atomic_reactor.util import (get_build_json, get_manifest_list,
                                  get_config_from_registry, ImageName,
-                                 get_orchestrator_platforms)
-from atomic_reactor.constants import PLUGIN_CHECK_AND_SET_PLATFORMS_KEY
+                                 get_platforms)
 from atomic_reactor.core import RetryGeneratorException
 from atomic_reactor.plugins.pre_reactor_config import (get_source_registry,
                                                        get_platform_to_goarch_mapping,
@@ -235,7 +234,7 @@ class PullBaseImagePlugin(PreBuildPlugin):
 
     def _validate_platforms_in_image(self, image):
         """Ensure that the image provides all platforms expected for the build."""
-        expected_platforms = self._get_expected_platforms()
+        expected_platforms = get_platforms(self.workflow)
         if not expected_platforms:
             self.log.info('Skipping validation of available platforms '
                           'because expected platforms are unknown')
@@ -275,11 +274,3 @@ class PullBaseImagePlugin(PreBuildPlugin):
             'Missing arches in manifest list for base image'
 
         self.log.info('Base image is a manifest list for all required platforms')
-
-    def _get_expected_platforms(self):
-        """retrieve expected platforms configured for this build"""
-        platforms = self.workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
-        if platforms:
-            return platforms
-
-        return get_orchestrator_platforms(self.workflow)
diff --git a/atomic_reactor/plugins/pre_resolve_composes.py b/atomic_reactor/plugins/pre_resolve_composes.py
index 65cfd77..9056bba 100644
--- a/atomic_reactor/plugins/pre_resolve_composes.py
+++ b/atomic_reactor/plugins/pre_resolve_composes.py
@@ -13,8 +13,7 @@ import yaml
 from collections import defaultdict
 
 from atomic_reactor.constants import (PLUGIN_KOJI_PARENT_KEY, PLUGIN_RESOLVE_COMPOSES_KEY,
-                                      REPO_CONTENT_SETS_CONFIG, PLUGIN_BUILD_ORCHESTRATE_KEY,
-                                      PLUGIN_CHECK_AND_SET_PLATFORMS_KEY, BASE_IMAGE_KOJI_BUILD)
+                                      REPO_CONTENT_SETS_CONFIG, BASE_IMAGE_KOJI_BUILD)
 
 from atomic_reactor.plugin import PreBuildPlugin
 from atomic_reactor.plugins.build_orchestrate_build import override_build_kwarg
@@ -22,6 +21,7 @@ from atomic_reactor.plugins.pre_check_and_set_rebuild import is_rebuild
 from atomic_reactor.plugins.pre_reactor_config import (get_config,
                                                        get_odcs_session,
                                                        get_koji_session, get_koji)
+from atomic_reactor.util import get_platforms
 
 ODCS_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
 MINIMUM_TIME_TO_EXPIRE = timedelta(hours=2).total_seconds()
@@ -135,16 +135,6 @@ class ResolveComposesPlugin(PreBuildPlugin):
             self.log.info('Autorebuild detected: Ignoring compose_ids plugin parameter')
             self.compose_ids = tuple()
 
-    def get_arches(self):
-        platforms = self.workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
-        if platforms:
-            return [platform for platform in platforms]
-
-        # Fallback to build_orchestrate_build args if check_and_set_platforms didn't run
-        for plugin in self.workflow.buildstep_plugins_conf:
-            if plugin['name'] == PLUGIN_BUILD_ORCHESTRATE_KEY:
-                return plugin['args']['platforms']
-
     def read_configs(self):
         self.odcs_config = get_config(self.workflow).get_odcs_config()
         if not self.odcs_config:
@@ -161,9 +151,12 @@ class ResolveComposesPlugin(PreBuildPlugin):
             with open(file_path) as f:
                 pulp_data = yaml.safe_load(f) or {}
 
-        arches = self.get_arches()
+        platforms = get_platforms(self.workflow)
+        if platforms:
+            platforms = sorted(platforms)
 
-        self.compose_config = ComposeConfig(data, pulp_data, self.odcs_config, arches=arches)
+        self.compose_config = ComposeConfig(data, pulp_data, self.odcs_config,
+                                            arches=platforms)
 
     def adjust_compose_config(self):
         if self.signing_intent:
diff --git a/atomic_reactor/util.py b/atomic_reactor/util.py
index c3e83e6..701e12f 100644
--- a/atomic_reactor/util.py
+++ b/atomic_reactor/util.py
@@ -41,7 +41,9 @@ from atomic_reactor.constants import (DOCKERFILE_FILENAME, REPO_CONTAINER_CONFIG
                                       MEDIA_TYPE_DOCKER_V2_SCHEMA1, MEDIA_TYPE_DOCKER_V2_SCHEMA2,
                                       MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST, MEDIA_TYPE_OCI_V1,
                                       MEDIA_TYPE_OCI_V1_INDEX, GIT_MAX_RETRIES, GIT_BACKOFF_FACTOR,
-                                      PLUGIN_BUILD_ORCHESTRATE_KEY, PLUGIN_KOJI_PARENT_KEY,
+                                      PLUGIN_BUILD_ORCHESTRATE_KEY,
+                                      PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
+                                      PLUGIN_KOJI_PARENT_KEY,
                                       PARENT_IMAGE_BUILDS_KEY, PARENT_IMAGES_KOJI_BUILDS,
                                       BASE_IMAGE_KOJI_BUILD, BASE_IMAGE_BUILD_ID_KEY)
 
@@ -608,6 +610,15 @@ def get_orchestrator_platforms(workflow):
             return plugin['args']['platforms']
 
 
+def get_platforms(workflow):
+    koji_platforms = workflow.prebuild_results.get(PLUGIN_CHECK_AND_SET_PLATFORMS_KEY)
+    if koji_platforms:
+        return koji_platforms
+
+    # Not an orchestrated build
+    return None
+
+
 # copypasted and slightly modified from
 # http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size/1094933#1094933
 def human_size(num, suffix='B'):
diff --git a/tests/plugins/test_add_filesystem.py b/tests/plugins/test_add_filesystem.py
index 16d872e..2ec4281 100644
--- a/tests/plugins/test_add_filesystem.py
+++ b/tests/plugins/test_add_filesystem.py
@@ -196,11 +196,15 @@ def mock_workflow(tmpdir, dockerfile=DEFAULT_DOCKERFILE):
     return workflow
 
 
-def create_plugin_instance(tmpdir, kwargs=None, scratch=False, reactor_config_map=False):  # noqa
+def create_plugin_instance(tmpdir, kwargs=None, scratch=False, reactor_config_map=False,  # noqa
+                           architectures=None):
     flexmock(util).should_receive('is_scratch_build').and_return(scratch)
     tasker = flexmock()
     workflow = mock_workflow(tmpdir)
 
+    if architectures:
+        workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(architectures)
+
     if kwargs is None:
         kwargs = {}
 
@@ -292,50 +296,6 @@ def test_add_filesystem_plugin_legacy(tmpdir, docker_tasker, scratch, reactor_co
     assert 'filesystem-koji-task-id' in plugin_result
 
 
-@pytest.mark.parametrize(('global_arches', 'param_arches', 'expected_arches'), (
-    (['x86_64'], None, ['x86_64']),
-    (None, ['x86_64'], ['x86_64']),
-    (['x86_64', 'ppc64le'], None, ['x86_64', 'ppc64le']),
-    (None, ['x86_64', 'ppc64le'], ['x86_64', 'ppc64le']),
-    (['x86_64'], ['spam'], ['x86_64']),
-    (['x86_64', 'ppc64le'], ['spam', 'bacon'], ['x86_64', 'ppc64le']),
-))
-def test_use_check_and_set_platforms_result(tmpdir, docker_tasker, global_arches, param_arches,
-                                            expected_arches):
-    """
-    global_arches: list of architectures returned by check_and_set_platforms plugin
-    param_arches: list of architectures given to add_filesystem plugin as parameter
-    """
-    if MOCK:
-        mock_docker()
-
-    workflow = mock_workflow(tmpdir)
-    mock_koji_session(arches=expected_arches)
-    mock_image_build_file(str(tmpdir))
-
-    if global_arches:
-        workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(global_arches)
-
-    plugin_args = {}
-    if param_arches:
-        plugin_args['architectures'] = param_arches
-
-    runner = PreBuildPluginsRunner(
-        docker_tasker,
-        workflow,
-        [{
-            'name': PLUGIN_ADD_FILESYSTEM_KEY,
-            'args': plugin_args
-        }]
-    )
-
-    results = runner.run()
-    plugin_result = results[PLUGIN_ADD_FILESYSTEM_KEY]
-    assert 'base-image-id' in plugin_result
-    assert plugin_result['base-image-id'] is None
-    assert 'filesystem-koji-task-id' in plugin_result
-
-
 @pytest.mark.parametrize(('base_image', 'type_match'), [
     ('koji/image-build', True),
     ('KoJi/ImAgE-bUiLd  \n', True),
@@ -529,9 +489,9 @@ def test_image_build_overwrites(tmpdir, architectures, architecture, reactor_con
                     """))
     plugin = create_plugin_instance(tmpdir, {
         'repos': repos,
-        'architectures': architectures,
         'architecture': architecture
-    }, reactor_config_map=reactor_config_map)
+    }, reactor_config_map=reactor_config_map,
+                                    architectures=architectures)
     image_build_conf = dedent("""\
         [image-build]
         name = my-name
@@ -562,10 +522,13 @@ def test_image_build_overwrites(tmpdir, architectures, architecture, reactor_con
         config_arch = [architecture]
     else:
         config_arch = ['i386', 'i486']
+
+    # Sort architectures for comparison
+    config[2] = sorted(config[2])
     assert config == [
         'my-name',
         '1.0',
-        config_arch,
+        sorted(config_arch),
         'guest-fedora-23-docker',
         'http://install-tree.com/$arch/fedora23/',
     ]
@@ -628,6 +591,9 @@ def test_image_download(tmpdir, docker_tasker, architecture, architectures, down
     mock_koji_session(download_filesystem=download_filesystem)
     mock_image_build_file(str(tmpdir))
 
+    if architectures:
+        workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(architectures)
+
     if reactor_config_map:
         make_and_store_reactor_config_map(workflow, {'root_url': '', 'auth': {}})
 
@@ -639,7 +605,6 @@ def test_image_download(tmpdir, docker_tasker, architecture, architectures, down
             'args': {
                 'koji_hub': KOJI_HUB,
                 'architecture': architecture,
-                'architectures': architectures,
             }
         }]
     )
diff --git a/tests/plugins/test_check_and_set_platforms.py b/tests/plugins/test_check_and_set_platforms.py
index 8265fcb..79adc1a 100644
--- a/tests/plugins/test_check_and_set_platforms.py
+++ b/tests/plugins/test_check_and_set_platforms.py
@@ -66,6 +66,45 @@ class MockSource(object):
         return self.path, self.path
 
 
+class MockClusterConfig(object):
+    enabled = True
+
+
+class MockConfig(object):
+    def __init__(self, platforms):
+        if platforms:
+            self.platforms = set(platforms.split())
+        else:
+            self.platforms = ['x86_64']
+
+    def get_enabled_clusters_for_platform(self, platform):
+        if platform in self.platforms:
+            return MockClusterConfig
+        else:
+            return []
+
+
+def write_container_yaml(tmpdir, platform_exclude='', platform_only=''):
+    platforms_dict = {}
+    if platform_exclude != '':
+        platforms_dict['platforms'] = {}
+        platforms_dict['platforms']['not'] = platform_exclude
+    if platform_only != '':
+        if 'platforms' not in platforms_dict:
+            platforms_dict['platforms'] = {}
+        platforms_dict['platforms']['only'] = platform_only
+
+    container_path = os.path.join(str(tmpdir), REPO_CONTAINER_CONFIG)
+    with open(container_path, 'w') as f:
+        f.write(yaml.safe_dump(platforms_dict))
+        f.flush()
+
+
+def set_orchestrator_platforms(workflow, orchestrator_platforms):
+    workflow.buildstep_plugins_conf = [{'name': PLUGIN_BUILD_ORCHESTRATE_KEY,
+                                        'args': {'platforms': orchestrator_platforms}}]
+
+
 def prepare(tmpdir):
     if MOCK:
         mock_docker()
@@ -96,19 +135,7 @@ def prepare(tmpdir):
     ('x86_64 ppc64le', '', '', ['x86_64', 'ppc64le'])
 ])
 def test_check_and_set_platforms(tmpdir, platforms, platform_exclude, platform_only, result):
-    platforms_dict = {}
-    if platform_exclude != '':
-        platforms_dict['platforms'] = {}
-        platforms_dict['platforms']['not'] = platform_exclude
-    if platform_only != '':
-        if 'platforms' not in platforms_dict:
-            platforms_dict['platforms'] = {}
-        platforms_dict['platforms']['only'] = platform_only
-
-    container_path = os.path.join(str(tmpdir), REPO_CONTAINER_CONFIG)
-    with open(container_path, 'w') as f:
-        f.write(yaml.safe_dump(platforms_dict))
-        f.flush()
+    write_container_yaml(tmpdir, platform_exclude, platform_only)
 
     tasker, workflow = prepare(tmpdir)
 
@@ -123,6 +150,9 @@ def test_check_and_set_platforms(tmpdir, platforms, platform_exclude, platform_o
     flexmock(reactor_config).should_receive('get_koji').and_return(mock_koji_config)
     flexmock(koji_util).should_receive('create_koji_session').and_return(session)
 
+    mock_config = MockConfig(platforms)
+    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)
+
     runner = PreBuildPluginsRunner(tasker, workflow, [{
         'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
         'args': {'koji_target': KOJI_TARGET},
@@ -136,26 +166,25 @@ def test_check_and_set_platforms(tmpdir, platforms, platform_exclude, platform_o
         assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
 
 
-@pytest.mark.parametrize(('labels', 'platforms', 'orchestrator_platforms', 'result'), [
-    ({}, None, None, None),
-    ({}, 'x86_64 arm64', ['spam', 'bacon'], ['arm64', 'x86_64']),
-    ({'isolated': True}, 'spam bacon', ['x86_64', 'arm64'], ['arm64', 'x86_64']),
-    ({'isolated': True}, 'x86_64 arm64', None, ['arm64', 'x86_64']),
-    ({'isolated': True}, None, ['x86_64', 'arm64'], None),
-    ({'scratch': True}, 'spam bacon', ['x86_64', 'arm64'], ['arm64', 'x86_64']),
-    ({'scratch': True}, 'x86_64 arm64', None, ['arm64', 'x86_64']),
-    ({'scratch': True}, None, ['x86_64', 'arm64'], None),
+@pytest.mark.parametrize(('labels', 'platforms', 'orchestrator_platforms', 'platform_only',
+                          'result'), [
+    ({}, None, None, '', None),
+    ({}, 'x86_64 arm64', ['spam', 'bacon'], '', ['arm64', 'x86_64']),
+    ({'isolated': True}, 'spam bacon', ['x86_64', 'arm64'], '', ['arm64', 'x86_64']),
+    ({'isolated': True}, 'x86_64 arm64', None, '', ['arm64', 'x86_64']),
+    ({'isolated': True}, None, ['x86_64', 'arm64'], '', None),
+    ({'scratch': True}, 'spam bacon', ['x86_64', 'arm64'], '', ['arm64', 'x86_64']),
+    ({'scratch': True}, 'x86_64 arm64', None, '', ['arm64', 'x86_64']),
+    ({'scratch': True}, None, ['x86_64', 'arm64'], '', None),
+    ({'scratch': True}, 'x86_64 arm64', ['x86_64', 'arm64'], 'x86_64', ['x86_64']),
 ])
-def test_check_isolated_or_scratch(tmpdir, labels, platforms, orchestrator_platforms, result):
-    container_path = os.path.join(str(tmpdir), REPO_CONTAINER_CONFIG)
-    with open(container_path, 'w') as f:
-        f.write(yaml.safe_dump({}))
-        f.flush()
+def test_check_isolated_or_scratch(tmpdir, labels, platforms,
+                                   orchestrator_platforms, platform_only, result):
+    write_container_yaml(tmpdir, platform_only=platform_only)
 
     tasker, workflow = prepare(tmpdir)
     if orchestrator_platforms:
-        workflow.buildstep_plugins_conf = [{'name': PLUGIN_BUILD_ORCHESTRATE_KEY,
-                                            'args': {'platforms': orchestrator_platforms}}]
+        set_orchestrator_platforms(workflow, orchestrator_platforms)
 
     build_json = {'metadata': {'labels': labels}}
     flexmock(util).should_receive('get_build_json').and_return(build_json)
@@ -168,6 +197,9 @@ def test_check_isolated_or_scratch(tmpdir, labels, platforms, orchestrator_platf
     flexmock(reactor_config).should_receive('get_koji').and_return(mock_koji_config)
     flexmock(koji_util).should_receive('create_koji_session').and_return(session)
 
+    mock_config = MockConfig(platforms)
+    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)
+
     runner = PreBuildPluginsRunner(tasker, workflow, [{
         'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
         'args': {'koji_target': KOJI_TARGET},
@@ -179,3 +211,65 @@ def test_check_isolated_or_scratch(tmpdir, labels, platforms, orchestrator_platf
         assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
     else:
         assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
+
+
+@pytest.mark.parametrize(('platforms', 'platform_only', 'result'), [
+    (None, 'ppc64le', None),
+    ('x86_64 ppc64le', '', ['x86_64', 'ppc64le']),
+    ('x86_64 ppc64le', 'ppc64le', ['ppc64le']),
+])
+def test_check_and_set_platforms_no_koji(tmpdir, platforms, platform_only, result):
+    write_container_yaml(tmpdir, platform_only=platform_only)
+
+    tasker, workflow = prepare(tmpdir)
+
+    if platforms:
+        set_orchestrator_platforms(workflow, platforms.split())
+
+    build_json = {'metadata': {'labels': {}}}
+    flexmock(util).should_receive('get_build_json').and_return(build_json)
+
+    mock_config = MockConfig(platforms)
+    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)
+
+    runner = PreBuildPluginsRunner(tasker, workflow, [{
+        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
+    }])
+
+    plugin_result = runner.run()
+    if platforms:
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
+    else:
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
+
+
+@pytest.mark.parametrize(('platforms', 'platform_only', 'cluster_platforms', 'result'), [
+    ('x86_64 ppc64le', '', 'x86_64', ['x86_64']),
+    ('x86_64 ppc64le arm64', ['x86_64', 'arm64'], 'x86_64', ['x86_64']),
+])
+def test_platforms_from_cluster_config(tmpdir, platforms, platform_only,
+                                       cluster_platforms, result):
+    write_container_yaml(tmpdir, platform_only=platform_only)
+
+    tasker, workflow = prepare(tmpdir)
+
+    if platforms:
+        set_orchestrator_platforms(workflow, platforms.split())
+
+    build_json = {'metadata': {'labels': {}}}
+    flexmock(util).should_receive('get_build_json').and_return(build_json)
+
+    mock_config = MockConfig(cluster_platforms)
+    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)
+
+    runner = PreBuildPluginsRunner(tasker, workflow, [{
+        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
+    }])
+
+    plugin_result = runner.run()
+    if platforms:
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
+    else:
+        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
diff --git a/tests/plugins/test_orchestrate_build.py b/tests/plugins/test_orchestrate_build.py
index b66c173..b69b996 100644
--- a/tests/plugins/test_orchestrate_build.py
+++ b/tests/plugins/test_orchestrate_build.py
@@ -41,7 +41,6 @@ import json
 import os
 import pytest
 import time
-import yaml
 import platform
 
 
@@ -122,7 +121,7 @@ class fake_manifest_list(object):
         return self.content
 
 
-def mock_workflow(tmpdir):
+def mock_workflow(tmpdir, platforms=['x86_64', 'ppc64le']):
     workflow = DockerBuildWorkflow(MOCK_SOURCE, TEST_IMAGE)
     builder = MockInsideBuilder()
     source = MockSource(tmpdir)
@@ -141,6 +140,8 @@ def mock_workflow(tmpdir):
     df = df_parser(df_path)
     setattr(workflow.builder, 'df_path', df.dockerfile_path)
 
+    workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(platforms)
+
     build = {
         "spec": {
             "strategy": {
@@ -275,7 +276,7 @@ def make_worker_build_kwargs(**overrides):
 ])
 def test_orchestrate_build(tmpdir, caplog, config_kwargs,
                            worker_build_image, logs_return_bytes, reactor_config_map):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     mock_osbs(logs_return_bytes=logs_return_bytes)
     plugin_args = {
         'platforms': ['x86_64'],
@@ -653,7 +654,7 @@ def test_orchestrate_choose_cluster_retry_timeout(tmpdir):
 
 
 def test_orchestrate_build_cancelation(tmpdir):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     mock_osbs()
     mock_manifest_list()
     mock_reactor_config(tmpdir)
@@ -761,144 +762,10 @@ def test_orchestrate_build_choose_clusters(tmpdir, clusters_x86_64,
         assert plat_annotations['build']['cluster-url'] == 'https://chosen_{}.com/'.format(plat)
 
 
-@pytest.mark.parametrize(('platforms', 'platform_exclude', 'platform_only', 'result'), [
-    (['x86_64', 'ppc64le'], '', 'ppc64le', ['ppc64le']),
-    (['x86_64', 'spam', 'bacon', 'toast', 'ppc64le'], ['spam', 'bacon', 'eggs', 'toast'], '',
-     ['x86_64', 'ppc64le']),
-    (['ppc64le', 'spam', 'bacon', 'toast'], ['spam', 'bacon', 'eggs', 'toast'], 'ppc64le',
-     ['ppc64le']),
-    (['x86_64', 'bacon', 'toast'], 'toast', ['x86_64', 'ppc64le'], ['x86_64']),
-    (['x86_64', 'toast'], 'toast', 'x86_64', ['x86_64']),
-    (['x86_64', 'spam', 'bacon', 'toast'], ['spam', 'bacon', 'eggs', 'toast'], ['x86_64',
-                                                                                'ppc64le'],
-     ['x86_64']),
-    (['x86_64', 'ppc64le'], '', '', ['x86_64', 'ppc64le'])
-])
-def test_orchestrate_build_exclude_platforms(tmpdir, platforms, platform_exclude, platform_only,
-                                             result):
-    workflow = mock_workflow(tmpdir)
-    mock_osbs()
-    mock_manifest_list()
-
-    reactor_config = {
-        'x86_64': [
-            {
-                'name': 'worker01',
-                'max_concurrent_builds': 3
-            }
-        ],
-        'ppc64le': [
-            {
-                'name': 'worker02',
-                'max_concurrent_builds': 3
-            }
-        ]
-    }
-
-    for exclude in ('spam', 'bacon', 'eggs'):
-        reactor_config[exclude] = [
-            {'name': 'worker-{}'.format(exclude), 'max_concurrent_builds': 3}
-        ]
-
-    mock_reactor_config(tmpdir, reactor_config)
-
-    platforms_dict = {}
-    if platform_exclude != '':
-        platforms_dict['platforms'] = {}
-        platforms_dict['platforms']['not'] = platform_exclude
-    if platform_only != '':
-        if 'platforms' not in platforms_dict:
-            platforms_dict['platforms'] = {}
-        platforms_dict['platforms']['only'] = platform_only
-
-    with open(os.path.join(str(tmpdir), 'container.yaml'), 'w') as f:
-        f.write(yaml.safe_dump(platforms_dict))
-        f.flush()
-
-    runner = BuildStepPluginsRunner(
-        workflow.builder.tasker,
-        workflow,
-        [{
-            'name': OrchestrateBuildPlugin.key,
-            'args': {
-                # Explicitly leaving off 'eggs' platform to
-                # ensure no errors occur when unknown platform
-                # is provided in container.yaml file.
-                'platforms': platforms,
-                'build_kwargs': make_worker_build_kwargs(),
-                'osbs_client_config': str(tmpdir),
-                'goarch': {'x86_64': 'amd64'},
-            }
-        }]
-    )
-
-    build_result = runner.run()
-    assert not build_result.is_failed()
-
-    annotations = build_result.annotations
-    assert set(annotations['worker-builds'].keys()) == set(result)
-
-
-@pytest.mark.parametrize(('platforms', 'plugin_results', 'result'), [
-    (['x86_64', 'ppc64le'], ['ppc64le'], ['ppc64le']),
-    (['x86_64', 'spam', 'bacon', 'toast', 'ppc64le'], ['x86_64', 'ppc64le'],
-     ['x86_64', 'ppc64le']),
-    (['x86_64', 'ppc64le'], None, ['x86_64', 'ppc64le']),
-    (None, ['x86_64', 'ppc64le'], ['x86_64', 'ppc64le']),
-])
-def test_orchestrate_build_exclude_platforms_from_plugin(tmpdir, platforms, plugin_results, result):
-    workflow = mock_workflow(tmpdir)
-    mock_osbs()
-    mock_manifest_list()
-
-    reactor_config = {
-        'x86_64': [
-            {
-                'name': 'worker01',
-                'max_concurrent_builds': 3
-            }
-        ],
-        'ppc64le': [
-            {
-                'name': 'worker02',
-                'max_concurrent_builds': 3
-            }
-        ]
-    }
-
-    mock_reactor_config(tmpdir, reactor_config)
-
-    if plugin_results:
-        workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(plugin_results)
-    else:
-        workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = None
-
-    plugin_args = {
-        'build_kwargs': make_worker_build_kwargs(),
-        'osbs_client_config': str(tmpdir),
-        'goarch': {'x86_64': 'amd64'},
-    }
-    if platforms:
-        plugin_args['platforms'] = platforms
-
-    runner = BuildStepPluginsRunner(
-        workflow.builder.tasker,
-        workflow,
-        [{
-            'name': OrchestrateBuildPlugin.key,
-            'args': plugin_args
-        }]
-    )
-
-    build_result = runner.run()
-    assert not build_result.is_failed()
-
-    annotations = build_result.annotations
-    assert set(annotations['worker-builds'].keys()) == set(result)
-
-
+# This test tests code paths that can no longer be hit in actual operation since
+# we exclude platforms with no clusters in check_and_set_platforms.
 def test_orchestrate_build_unknown_platform(tmpdir, reactor_config_map):  # noqa
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64', 'spam'])
     mock_osbs()
     mock_manifest_list()
     if reactor_config_map:
@@ -1094,7 +961,7 @@ def test_orchestrate_build_failed_waiting(tmpdir,
     (None, 'TypeError'),
 ])
 def test_orchestrate_build_get_fs_task_id(tmpdir, task_id, error):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     mock_osbs()
 
     mock_reactor_config(tmpdir)
@@ -1128,7 +995,7 @@ def test_orchestrate_build_get_fs_task_id(tmpdir, task_id, error):
 
 @pytest.mark.parametrize('fail_at', ('all', 'first'))
 def test_orchestrate_build_failed_to_list_builds(tmpdir, fail_at):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     mock_osbs()  # Current builds is a constant 2
 
     mock_reactor_config(tmpdir, {
@@ -1184,7 +1051,7 @@ def test_orchestrate_build_failed_to_list_builds(tmpdir, fail_at):
     False
 ])
 def test_orchestrate_build_worker_build_kwargs(tmpdir, caplog, is_auto):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     expected_kwargs = {
         'git_uri': SOURCE['uri'],
         'git_ref': 'master',
@@ -1233,7 +1100,7 @@ def test_orchestrate_build_worker_build_kwargs(tmpdir, caplog, is_auto):
     {'x86_64': '4242', None: '1111'},
 ])
 def test_orchestrate_override_build_kwarg(tmpdir, overrides):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     expected_kwargs = {
         'git_uri': SOURCE['uri'],
         'git_ref': 'master',
@@ -1287,7 +1154,7 @@ def test_orchestrate_override_build_kwarg(tmpdir, overrides):
     ['v2'],
 ])
 def test_orchestrate_override_content_versions(tmpdir, caplog, enable_v1, content_versions):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=['x86_64'])
     expected_kwargs = {
         'git_uri': SOURCE['uri'],
         'git_ref': 'master',
@@ -1678,7 +1545,7 @@ def test_set_build_image_raises(tmpdir, build, exc_str, bc, bc_cont, ims, ims_co
 def test_set_build_image_works(tmpdir, build, bc, bc_cont, ims, ims_cont, ml, ml_cont,
                                platforms):
     build = json.dumps(build)
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=platforms)
 
     orchestrator_default_platform = 'x86_64'
     (flexmock(platform)
@@ -1727,7 +1594,7 @@ def test_set_build_image_works(tmpdir, build, bc, bc_cont, ims, ims_cont, ml, ml
     (['ppc64le'], ['ppc64le']),
 ])
 def test_set_build_image_with_override(tmpdir, platforms, override):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=platforms)
 
     default_build_image = 'registry/osbs-buildroot@sha256:12345'
     build = json.dumps({"spec": {
@@ -1782,8 +1649,9 @@ def test_set_build_image_with_override(tmpdir, platforms, override):
                                                                           default_build_image)
         assert used_build_image == expected_build_image
 
+
 def test_no_platforms(tmpdir):
-    workflow = mock_workflow(tmpdir)
+    workflow = mock_workflow(tmpdir, platforms=[])
     mock_osbs()
     mock_reactor_config(tmpdir)
 
diff --git a/tests/plugins/test_pull_base_image.py b/tests/plugins/test_pull_base_image.py
index d0f361e..6317b86 100644
--- a/tests/plugins/test_pull_base_image.py
+++ b/tests/plugins/test_pull_base_image.py
@@ -377,20 +377,6 @@ class TestValidateBaseImage(object):
                                     check_platforms=True)
         assert log_message in caplog.text()
 
-    def test_manifest_list_fallback_to_orchestrate_build_args(self, caplog):
-
-        def workflow_callback(workflow):
-            self.prepare(workflow)
-            del workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
-            return workflow
-
-        log_message = 'manifest list for all required platforms'
-        test_pull_base_image_plugin(LOCALHOST_REGISTRY, BASE_IMAGE,
-                                    [], [], reactor_config_map=True,
-                                    workflow_callback=workflow_callback,
-                                    check_platforms=True)
-        assert log_message in caplog.text()
-
     def test_expected_platforms_unknown(self, caplog):
 
         def workflow_callback(workflow):
diff --git a/tests/plugins/test_resolve_composes.py b/tests/plugins/test_resolve_composes.py
index b0b9702..0b66ac9 100644
--- a/tests/plugins/test_resolve_composes.py
+++ b/tests/plugins/test_resolve_composes.py
@@ -267,10 +267,6 @@ class TestResolveComposes(object):
         workflow.buildstep_plugins_conf[0]['args']['platforms'] = arches
         self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
 
-    def test_request_compose_fallback(self, workflow, reactor_config_map):  # noqa:F811
-        del workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
-        self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
-
     def test_request_compose_for_modules(self, workflow, reactor_config_map):  # noqa:F811
         repo_config = dedent("""\
             compose:
@@ -360,8 +356,10 @@ class TestResolveComposes(object):
         for flag in flags:
             repo_config += ("    {0}: {1}\n".format(flag, flags[flag]))
         mock_repo_config(workflow._tmpdir, repo_config)
-        del workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
-        workflow.buildstep_plugins_conf[0]['args']['platforms'] = arches
+        if arches:
+            workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(arches)
+        else:
+            del workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
         tag_compose = deepcopy(ODCS_COMPOSE)
 
         sig_keys = SIGNING_INTENTS[signing_intent]
@@ -370,7 +368,7 @@ class TestResolveComposes(object):
             tag_compose['arches'] = ' '.join(arches)
             (flexmock(ODCSClient)
                 .should_receive('start_compose')
-                .with_args(source_type='tag', source=KOJI_TAG_NAME, arches=arches,
+                .with_args(source_type='tag', source=KOJI_TAG_NAME, arches=sorted(arches),
                            packages=['spam', 'bacon', 'eggs'], sigkeys=sig_keys)
                 .and_return(tag_compose).once())
         else:
-- 
2.14.3