#10 Add patches from upstream PRs to improve Flatpak support
Merged 5 years ago by cverna. Opened 5 years ago by otaylor.
rpms/otaylor/atomic-reactor f28-flatpak-improvements into f28

@@ -0,0 +1,821 @@ 

+ From 0ef58ca01d680c74f9776bf593990e8e9cadb98d Mon Sep 17 00:00:00 2001

+ From: "Owen W. Taylor" <otaylor@fishsoup.net>

+ Date: Tue, 31 Jul 2018 09:14:35 -0400

+ Subject: [PATCH 3/3] Move to using flatpak-module-tools as a library

+ 

+ https://github.com/projectatomic/atomic-reactor/pull/1052

+ 

+ Instead of sharing code cut-and-paste between atomic-reactor and

+ https://pagure.io/flatpak-module-tools, make atomic-reactor use

+ flatpak-module-tools as a library for the shared code. This will

+ prevent the code getting out of sync, and make future maintenance

+ easier.

+ 

+ The flatpak-module-tools version has major improvements to how

+ the filesystem tree is postprocessed to make a Flatpak using configuration

+ options from container.yaml, which requires some small adjustments to the

+ tests here.

+ ---

+  atomic_reactor/plugins/exit_koji_import.py         |   6 +-

+  atomic_reactor/plugins/exit_koji_promote.py        |   6 +-

+  .../plugins/pre_flatpak_create_dockerfile.py       | 152 ++-------

+  .../plugins/pre_resolve_module_compose.py          |  12 +-

+  .../plugins/prepub_flatpak_create_oci.py           | 368 +--------------------

+  images/dockerhost-builder/Dockerfile               |   2 +-

+  images/privileged-builder/Dockerfile               |   2 +-

+  requirements-flatpak.txt                           |   1 +

+  tests/flatpak.py                                   |  17 +-

+  9 files changed, 57 insertions(+), 509 deletions(-)

+ 

+ diff --git a/atomic_reactor/plugins/exit_koji_import.py b/atomic_reactor/plugins/exit_koji_import.py

+ index 4b1e1c9..e59b557 100644

+ --- a/atomic_reactor/plugins/exit_koji_import.py

+ +++ b/atomic_reactor/plugins/exit_koji_import.py

+ @@ -24,6 +24,7 @@ from atomic_reactor.plugins.pre_reactor_config import get_openshift_session

+  

+  try:

+      from atomic_reactor.plugins.pre_flatpak_create_dockerfile import get_flatpak_source_info

+ +    from atomic_reactor.plugins.pre_resolve_module_compose import get_compose_info

+  except ImportError:

+      # modulemd and/or pdc_client not available

+      def get_flatpak_source_info(_):

+ @@ -353,7 +354,10 @@ class KojiImportPlugin(ExitPlugin):

+  

+          flatpak_source_info = get_flatpak_source_info(self.workflow)

+          if flatpak_source_info is not None:

+ -            extra['image'].update(flatpak_source_info.koji_metadata())

+ +            compose_info = get_compose_info(self.workflow)

+ +            koji_metadata = compose_info.koji_metadata()

+ +            koji_metadata['flatpak'] = True

+ +            extra['image'].update(koji_metadata)

+  

+          koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')

+          extra['submitter'] = self.session.getLoggedInUser()['name']

+ diff --git a/atomic_reactor/plugins/exit_koji_promote.py b/atomic_reactor/plugins/exit_koji_promote.py

+ index da33380..0fe1f47 100644

+ --- a/atomic_reactor/plugins/exit_koji_promote.py

+ +++ b/atomic_reactor/plugins/exit_koji_promote.py

+ @@ -30,6 +30,7 @@ from atomic_reactor.util import get_parent_image_koji_data

+  

+  try:

+      from atomic_reactor.plugins.pre_flatpak_create_dockerfile import get_flatpak_source_info

+ +    from atomic_reactor.plugins.pre_resolve_module_compose import get_compose_info

+  except ImportError:

+      # modulemd and/or pdc_client not available

+      def get_flatpak_source_info(_):

+ @@ -564,7 +565,10 @@ class KojiPromotePlugin(ExitPlugin):

+  

+          flatpak_source_info = get_flatpak_source_info(self.workflow)

+          if flatpak_source_info is not None:

+ -            extra['image'].update(flatpak_source_info.koji_metadata())

+ +            compose_info = get_compose_info(self.workflow)

+ +            koji_metadata = compose_info.koji_metadata()

+ +            koji_metadata['flatpak'] = True

+ +            extra['image'].update(koji_metadata)

+  

+          resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)

+          if resolve_comp_result:

+ diff --git a/atomic_reactor/plugins/pre_flatpak_create_dockerfile.py b/atomic_reactor/plugins/pre_flatpak_create_dockerfile.py

+ index 148be70..a53e0da 100644

+ --- a/atomic_reactor/plugins/pre_flatpak_create_dockerfile.py

+ +++ b/atomic_reactor/plugins/pre_flatpak_create_dockerfile.py

+ @@ -20,6 +20,8 @@ Example configuration:

+  

+  import os

+  

+ +from flatpak_module_tools.flatpak_builder import FlatpakSourceInfo, FlatpakBuilder

+ +

+  from atomic_reactor.constants import DOCKERFILE_FILENAME, YUM_REPOS_DIR

+  from atomic_reactor.plugin import PreBuildPlugin

+  from atomic_reactor.plugins.pre_resolve_module_compose import get_compose_info

+ @@ -52,67 +54,6 @@ RUN chroot /var/tmp/flatpak-build/ /bin/sh /tmp/cleanup.sh

+  '''

+  

+  

+ -class FlatpakSourceInfo(object):

+ -    def __init__(self, flatpak_yaml, compose):

+ -        self.flatpak_yaml = flatpak_yaml

+ -        self.compose = compose

+ -

+ -        mmd = compose.base_module.mmd

+ -        # A runtime module must have a 'runtime' profile, but can have other

+ -        # profiles for SDKs, minimal runtimes, etc.

+ -        self.runtime = 'runtime' in mmd.peek_profiles()

+ -

+ -        module_spec = split_module_spec(compose.source_spec)

+ -        if module_spec.profile:

+ -            self.profile = module_spec.profile

+ -        elif self.runtime:

+ -            self.profile = 'runtime'

+ -        else:

+ -            self.profile = 'default'

+ -

+ -        assert self.profile in mmd.peek_profiles()

+ -

+ -    # The module for the Flatpak runtime that this app runs against

+ -    @property

+ -    def runtime_module(self):

+ -        assert not self.runtime

+ -        compose = self.compose

+ -

+ -        dependencies = compose.base_module.mmd.props.dependencies

+ -        # A built module should have its dependencies already expanded

+ -        assert len(dependencies) == 1

+ -

+ -        for key in dependencies[0].peek_buildrequires().keys():

+ -            try:

+ -                module = compose.modules[key]

+ -                if 'runtime' in module.mmd.peek_profiles():

+ -                    return module

+ -            except KeyError:

+ -                pass

+ -

+ -        raise RuntimeError("Failed to identify runtime module in the buildrequires for {}"

+ -                           .format(compose.base_module.name))

+ -

+ -    # All modules that were build against the Flatpak runtime,

+ -    # and thus were built with prefix=/app. This is primarily the app module

+ -    # but might contain modules shared between multiple flatpaks as well.

+ -    @property

+ -    def app_modules(self):

+ -        runtime_module_name = self.runtime_module.mmd.props.name

+ -

+ -        def is_app_module(m):

+ -            dependencies = m.mmd.props.dependencies

+ -            return runtime_module_name in dependencies[0].peek_buildrequires()

+ -

+ -        return [m for m in self.compose.modules.values() if is_app_module(m)]

+ -

+ -    def koji_metadata(self):

+ -        metadata = self.compose.koji_metadata()

+ -        metadata['flatpak'] = True

+ -

+ -        return metadata

+ -

+ -

+  WORKSPACE_SOURCE_KEY = 'source_info'

+  

+  

+ @@ -157,7 +98,12 @@ class FlatpakCreateDockerfilePlugin(PreBuildPlugin):

+              raise RuntimeError(

+                  "resolve_module_compose must be run before flatpak_create_dockerfile")

+  

+ -        return FlatpakSourceInfo(flatpak_yaml, compose_info)

+ +        module_spec = split_module_spec(compose_info.source_spec)

+ +

+ +        return FlatpakSourceInfo(flatpak_yaml,

+ +                                 compose_info.modules,

+ +                                 compose_info.base_module,

+ +                                 module_spec.profile)

+  

+      def run(self):

+          """

+ @@ -168,38 +114,18 @@ class FlatpakCreateDockerfilePlugin(PreBuildPlugin):

+  

+          set_flatpak_source_info(self.workflow, source)

+  

+ -        module_info = source.compose.base_module

+ -

+ -        # For a runtime, certain information is duplicated between the container.yaml

+ -        # and the modulemd, check that it matches

+ -        if source.runtime:

+ -            flatpak_yaml = source.flatpak_yaml

+ -            flatpak_xmd = module_info.mmd.props.xmd['flatpak']

+ -

+ -            def check(condition, what):

+ -                if not condition:

+ -                    raise RuntimeError(

+ -                        "Mismatch for {} betweeen module xmd and container.yaml".format(what))

+ +        builder = FlatpakBuilder(source, None, None)

+  

+ -            check(flatpak_yaml['branch'] == flatpak_xmd['branch'], "'branch'")

+ -            check(source.profile in flatpak_xmd['runtimes'], 'profile name')

+ -

+ -            profile_xmd = flatpak_xmd['runtimes'][source.profile]

+ -

+ -            check(flatpak_yaml['id'] == profile_xmd['id'], "'id'")

+ -            check(flatpak_yaml.get('runtime', None) ==

+ -                  profile_xmd.get('runtime', None), "'runtime'")

+ -            check(flatpak_yaml.get('sdk', None) == profile_xmd.get('sdk', None), "'sdk'")

+ +        builder.precheck()

+  

+          # Create the dockerfile

+  

+ +        module_info = source.base_module

+ +

+          # We need to enable all the modules other than the platform pseudo-module

+ -        modules_str = ' '.join(sorted(m.mmd.props.name + ':' + m.mmd.props.stream

+ -                                      for m in source.compose.modules.values()

+ -                                      if m.mmd.props.name != 'platform'))

+ +        modules_str = ' '.join(builder.get_enable_modules())

+  

+ -        install_packages = module_info.mmd.peek_profiles()[source.profile].props.rpms.get()

+ -        install_packages_str = ' '.join(install_packages)

+ +        install_packages_str = ' '.join(builder.get_install_packages())

+  

+          df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)

+          with open(df_path, 'w') as fp:

+ @@ -213,54 +139,16 @@ class FlatpakCreateDockerfilePlugin(PreBuildPlugin):

+  

+          self.workflow.builder.set_df_path(df_path)

+  

+ -        # For a runtime, we want to make sure that the set of RPMs that is installed

+ -        # into the filesystem is *exactly* the set that is listed in the runtime

+ -        # profile. Requiring the full listed set of RPMs to be listed makes it

+ -        # easier to catch unintentional changes in the package list that might break

+ -        # applications depending on the runtime. It also simplifies the checking we

+ -        # do for application flatpaks, since we can simply look at the runtime

+ -        # modulemd to find out what packages are present in the runtime.

+ -        #

+ -        # For an application, we want to make sure that each RPM that is installed

+ -        # into the filesystem is *either* an RPM that is part of the 'runtime'

+ -        # profile of the base runtime, or from a module that was built with

+ -        # flatpak-rpm-macros in the install root and, thus, prefix=/app.

+ -        #

+ -        # We achieve this by restricting the set of available packages in the dnf

+ -        # configuration to just the ones that we want.

+ -        #

+ -        # The advantage of doing this upfront, rather than just checking after the

+ -        # fact is that this makes sure that when a application is being installed,

+ -        # we don't get a different package to satisfy a dependency than the one

+ -        # in the runtime - e.g. aajohan-comfortaa-fonts to satisfy font(:lang=en)

+ -        # because it's alphabetically first.

+ -

+ -        if not source.runtime:

+ -            runtime_module = source.runtime_module

+ -            runtime_profile = runtime_module.mmd.peek_profiles()['runtime']

+ -            available_packages = sorted(runtime_profile.props.rpms.get())

+ -

+ -            for m in source.app_modules:

+ -                # Strip off the '.rpm' suffix from the filename to get something

+ -                # that DNF can parse.

+ -                available_packages.extend(x[:-4] for x in m.rpms)

+ -        else:

+ -            base_module = source.compose.base_module

+ -            runtime_profile = base_module.mmd.peek_profiles()['runtime']

+ -            available_packages = sorted(runtime_profile.props.rpms.get())

+ -

+ +        includepkgs = builder.get_includepkgs()

+          includepkgs_path = os.path.join(self.workflow.builder.df_dir, 'atomic-reactor-includepkgs')

+          with open(includepkgs_path, 'w') as f:

+ -            f.write('includepkgs = ' + ','.join(available_packages) + '\n')

+ +            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

+  

+          # Create the cleanup script

+  

+          cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh")

+          with open(cleanupscript, 'w') as f:

+ -            cleanup_commands = source.flatpak_yaml.get('cleanup-commands')

+ -            if cleanup_commands is not None:

+ -                f.write(cleanup_commands.rstrip())

+ -                f.write("\n")

+ +            f.write(builder.get_cleanup_script())

+          os.chmod(cleanupscript, 0o0755)

+  

+          # Add a yum-repository pointing to the compose

+ @@ -270,9 +158,11 @@ class FlatpakCreateDockerfilePlugin(PreBuildPlugin):

+              stream=module_info.stream,

+              version=module_info.version)

+  

+ +        compose_info = get_compose_info(self.workflow)

+ +

+          repo = {

+              'name': repo_name,

+ -            'baseurl': source.compose.repo_url,

+ +            'baseurl': compose_info.repo_url,

+              'enabled': 1,

+              'gpgcheck': 0,

+          }

+ @@ -280,4 +170,4 @@ class FlatpakCreateDockerfilePlugin(PreBuildPlugin):

+          path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')

+          self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

+  

+ -        override_build_kwarg(self.workflow, 'module_compose_id', source.compose.compose_id)

+ +        override_build_kwarg(self.workflow, 'module_compose_id', compose_info.compose_id)

+ diff --git a/atomic_reactor/plugins/pre_resolve_module_compose.py b/atomic_reactor/plugins/pre_resolve_module_compose.py

+ index 5ce6e29..df5f040 100644

+ --- a/atomic_reactor/plugins/pre_resolve_module_compose.py

+ +++ b/atomic_reactor/plugins/pre_resolve_module_compose.py

+ @@ -23,6 +23,9 @@ Example configuration:

+  }

+  """

+  

+ +

+ +from flatpak_module_tools.flatpak_builder import ModuleInfo

+ +

+  import gi

+  try:

+      gi.require_version('Modulemd', '1.0')

+ @@ -37,15 +40,6 @@ from atomic_reactor.plugins.pre_reactor_config import (get_pdc_session, get_odcs

+                                                         get_pdc, get_odcs)

+  

+  

+ -class ModuleInfo(object):

+ -    def __init__(self, name, stream, version, mmd, rpms):

+ -        self.name = name

+ -        self.stream = stream

+ -        self.version = version

+ -        self.mmd = mmd

+ -        self.rpms = rpms

+ -

+ -

+  class ComposeInfo(object):

+      def __init__(self, source_spec, compose_id, base_module, modules, repo_url):

+          self.source_spec = source_spec

+ diff --git a/atomic_reactor/plugins/prepub_flatpak_create_oci.py b/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ index b86e792..d1b6463 100644

+ --- a/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ +++ b/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ @@ -10,14 +10,7 @@ pre_flatpak_create_dockerfile, extracts the tree at /var/tmp/flatpak-build

+  and turns it into a Flatpak application or runtime.

+  """

+  

+ -import os

+ -from six.moves import configparser

+ -import re

+ -import shlex

+ -import shutil

+ -import subprocess

+ -import tarfile

+ -from textwrap import dedent

+ +from flatpak_module_tools.flatpak_builder import FlatpakBuilder

+  

+  from atomic_reactor.constants import IMAGE_TYPE_OCI, IMAGE_TYPE_OCI_TAR

+  from atomic_reactor.plugin import PrePublishPlugin

+ @@ -26,99 +19,6 @@ from atomic_reactor.rpm_util import parse_rpm_output

+  from atomic_reactor.util import get_exported_image_metadata

+  

+  

+ -# Returns flatpak's name for the current arch

+ -def get_arch():

+ -    return subprocess.check_output(['flatpak', '--default-arch'],

+ -                                   universal_newlines=True).strip()

+ -

+ -

+ -# flatpak build-init requires the sdk and runtime to be installed on the

+ -# build system (so that subsequent build steps can execute things with

+ -# the SDK). While it isn't impossible to download the runtime image and

+ -# install the flatpak, that would be a lot of unnecessary complexity

+ -# since our build step is just unpacking the filesystem we've already

+ -# created. This is a stub implementation of 'flatpak build-init' that

+ -# doesn't check for the SDK or use it to set up the build filesystem.

+ -def build_init(directory, appname, sdk, runtime, runtime_branch, tags=[]):

+ -    if not os.path.isdir(directory):

+ -        os.mkdir(directory)

+ -    with open(os.path.join(directory, "metadata"), "w") as f:

+ -        f.write(dedent("""\

+ -                       [Application]

+ -                       name={appname}

+ -                       runtime={runtime}/{arch}/{runtime_branch}

+ -                       sdk={sdk}/{arch}/{runtime_branch}

+ -                       """.format(appname=appname,

+ -                                  sdk=sdk,

+ -                                  runtime=runtime,

+ -                                  runtime_branch=runtime_branch,

+ -                                  arch=get_arch())))

+ -        if tags:

+ -            f.write("tags=" + ";".join(tags) + "\n")

+ -    os.mkdir(os.path.join(directory, "files"))

+ -

+ -

+ -# add_app_prefix('org.gimp', 'gimp, 'gimp.desktop') => org.gimp.desktop

+ -# add_app_prefix('org.gnome', 'eog, 'eog.desktop') => org.gnome.eog.desktop

+ -def add_app_prefix(app_id, root, full):

+ -    prefix = app_id

+ -    if prefix.endswith('.' + root):

+ -        prefix = prefix[:-(1 + len(root))]

+ -    return prefix + '.' + full

+ -

+ -

+ -def find_desktop_files(builddir):

+ -    desktopdir = os.path.join(builddir, 'files/share/applications')

+ -    for (dirpath, dirnames, filenames) in os.walk(desktopdir):

+ -        for filename in filenames:

+ -            if filename.endswith('.desktop'):

+ -                yield os.path.join(dirpath, filename)

+ -

+ -

+ -def find_icons(builddir, name):

+ -    icondir = os.path.join(builddir, 'files/share/icons/hicolor')

+ -    for (dirpath, dirnames, filenames) in os.walk(icondir):

+ -        for filename in filenames:

+ -            if filename.startswith(name + '.'):

+ -                yield os.path.join(dirpath, filename)

+ -

+ -

+ -def update_desktop_files(app_id, builddir):

+ -    for full_path in find_desktop_files(builddir):

+ -        cp = configparser.RawConfigParser()

+ -        cp.read([full_path])

+ -        try:

+ -            icon = cp.get('Desktop Entry', 'Icon')

+ -        except configparser.NoOptionError:

+ -            icon = None

+ -

+ -        # Does it have an icon?

+ -        if icon and not icon.startswith(app_id):

+ -            found_icon = False

+ -

+ -            # Rename any matching icons

+ -            for icon_file in find_icons(builddir, icon):

+ -                shutil.copy(icon_file,

+ -                            os.path.join(os.path.dirname(icon_file),

+ -                                         add_app_prefix(app_id, icon, os.path.basename(icon_file))))

+ -                found_icon = True

+ -

+ -            # If we renamed the icon, change the desktop file

+ -            if found_icon:

+ -                subprocess.check_call(['desktop-file-edit',

+ -                                       '--set-icon',

+ -                                       add_app_prefix(app_id, icon, icon), full_path])

+ -

+ -        # Is the desktop file not prefixed with the app id, then prefix it

+ -        basename = os.path.basename(full_path)

+ -        if not basename.startswith(app_id):

+ -            shutil.move(full_path,

+ -                        os.path.join(os.path.dirname(full_path),

+ -                                     add_app_prefix(app_id,

+ -                                                    basename[:-len('.desktop')],

+ -                                                    basename)))

+ -

+ -

+  # This converts the generator provided by the export() operation to a file-like

+  # object with a read that we can pass to tarfile.

+  class StreamAdapter(object):

+ @@ -165,133 +65,11 @@ class FlatpakCreateOciPlugin(PrePublishPlugin):

+          """

+          super(FlatpakCreateOciPlugin, self).__init__(tasker, workflow)

+  

+ -    # Compiles a list of path mapping rules to a simple function that matches

+ -    # against a list of fixed patterns, see below for rule syntax

+ -    def _compile_target_rules(rules):

+ -        ROOT = "var/tmp/flatpak-build"

+ -

+ -        patterns = []

+ -        for source, target in rules:

+ -            source = re.sub("^ROOT", ROOT, source)

+ -            if source.endswith("/"):

+ -                patterns.append((re.compile(source + "(.*)"), target, False))

+ -                patterns.append((source[:-1], target, True))

+ -            else:

+ -                patterns.append((source, target, True))

+ -

+ -        def get_target_func(self, path):

+ -            for source, target, is_exact_match in patterns:

+ -                if is_exact_match:

+ -                    if source == path:

+ -                        return target

+ -                else:

+ -                    m = source.match(path)

+ -                    if m:

+ -                        return os.path.join(target, m.group(1))

+ -

+ -            return None

+ -

+ -        return get_target_func

+ -

+ -    # Rules for mapping paths within the exported filesystem image to their

+ -    # location in the final flatpak filesystem

+ -    #

+ -    # ROOT = /var/tmp/flatpak-build

+ -    # No trailing slash - map a directory itself exactly

+ -    # trailing slash - map a directory and everything inside of it

+ -

+ -    _get_target_path_runtime = _compile_target_rules([

+ -        # We need to make sure that 'files' is created before 'files/etc',

+ -        # which wouldn't happen if just relied on ROOT/usr/ => files.

+ -        # Instead map ROOT => files and omit ROOT/usr

+ -        ("ROOT", "files"),

+ -        ("ROOT/usr", None),

+ -

+ -        # We map ROOT/usr => files and ROOT/etc => files/etc. This creates

+ -        # A conflict between ROOT/usr/etc and /ROOT/etc. Just assume there

+ -        # is nothing useful in /ROOT/usr/etc.

+ -        ("ROOT/usr/etc/", None),

+ -

+ -        ("ROOT/usr/", "files"),

+ -        ("ROOT/etc/", "files/etc")

+ -    ])

+ -

+ -    _get_target_path_app = _compile_target_rules([

+ -        ("ROOT/app/", "files")

+ -    ])

+ -

+ -    def _get_target_path(self, export_path):

+ -        if self.source.runtime:

+ -            return self._get_target_path_runtime(export_path)

+ -        else:

+ -            return self._get_target_path_app(export_path)

+ -

+      def _export_container(self, container_id):

+ -        outfile = os.path.join(self.workflow.source.workdir, 'filesystem.tar.gz')

+ -        manifestfile = os.path.join(self.workflow.source.workdir, 'flatpak-build.rpm_qf')

+ -

+          export_generator = self.tasker.d.export(container_id)

+          export_stream = StreamAdapter(export_generator)

+  

+ -        out_fileobj = open(outfile, "wb")

+ -        compress_process = subprocess.Popen(['gzip', '-c'],

+ -                                            stdin=subprocess.PIPE,

+ -                                            stdout=out_fileobj)

+ -        in_tf = tarfile.open(fileobj=export_stream, mode='r|')

+ -        out_tf = tarfile.open(fileobj=compress_process.stdin, mode='w|')

+ -

+ -        for member in in_tf:

+ -            if member.name == 'var/tmp/flatpak-build.rpm_qf':

+ -                reader = in_tf.extractfile(member)

+ -                with open(manifestfile, 'wb') as out:

+ -                    out.write(reader.read())

+ -                reader.close()

+ -            target_name = self._get_target_path(member.name)

+ -            if target_name is None:

+ -                continue

+ -

+ -            # Match the ownership/permissions changes done by 'flatpak build-export'.

+ -            # See commit_filter() in:

+ -            #   https://github.com/flatpak/flatpak/blob/master/app/flatpak-builtins-build-export.c

+ -            #

+ -            # We'll run build-export anyways in the app case, but in the runtime case we skip

+ -            # flatpak build-export and use ostree directly.

+ -            member.uid = 0

+ -            member.gid = 0

+ -            member.uname = "root"

+ -            member.gname = "root"

+ -

+ -            if member.isdir():

+ -                member.mode = 0o0755

+ -            elif member.mode & 0o0100:

+ -                member.mode = 0o0755

+ -            else:

+ -                member.mode = 0o0644

+ -

+ -            member.name = target_name

+ -            if member.islnk():

+ -                # Hard links have full paths within the archive (no leading /)

+ -                link_target = self._get_target_path(member.linkname)

+ -                if link_target is None:

+ -                    self.log.debug("Skipping %s, hard link to %s", target_name, link_target)

+ -                    continue

+ -                member.linkname = link_target

+ -                out_tf.addfile(member)

+ -            elif member.issym():

+ -                # Symlinks have the literal link target, which will be

+ -                # relative to the chroot and doesn't need rewriting

+ -                out_tf.addfile(member)

+ -            else:

+ -                f = in_tf.extractfile(member)

+ -                out_tf.addfile(member, fileobj=f)

+ -

+ -        in_tf.close()

+ -        out_tf.close()

+ -        export_stream.close()

+ -        compress_process.stdin.close()

+ -        if compress_process.wait() != 0:

+ -            raise RuntimeError("gzip failed")

+ -        out_fileobj.close()

+ +        outfile, manifestfile = self.builder._export_from_stream(export_stream)

+  

+          return outfile, manifestfile

+  

+ @@ -310,148 +88,23 @@ class FlatpakCreateOciPlugin(PrePublishPlugin):

+              self.log.info("Cleaning up docker container")

+              self.tasker.d.remove_container(container_id)

+  

+ -    def _get_components(self, manifest):

+ -        with open(manifest, 'r') as f:

+ -            lines = f.readlines()

+ -

+ -        return parse_rpm_output(lines)

+ -

+ -    def _filter_app_manifest(self, components):

+ -        runtime_rpms = self.source.runtime_module.mmd.peek_profiles()['runtime'].props.rpms

+ -

+ -        return [c for c in components if not runtime_rpms.contains(c['name'])]

+ -

+ -    def _create_runtime_oci(self, tarred_filesystem, outfile):

+ -        info = self.source.flatpak_yaml

+ -

+ -        builddir = os.path.join(self.workflow.source.workdir, "build")

+ -        os.mkdir(builddir)

+ -

+ -        repo = os.path.join(self.workflow.source.workdir, "repo")

+ -        subprocess.check_call(['ostree', 'init', '--mode=archive-z2', '--repo', repo])

+ -

+ -        id_ = info['id']

+ -        runtime_id = info.get('runtime', id_)

+ -        sdk_id = info.get('sdk', id_)

+ -        branch = info['branch']

+ -

+ -        args = {

+ -            'id': id_,

+ -            'runtime_id': runtime_id,

+ -            'sdk_id': sdk_id,

+ -            'arch': get_arch(),

+ -            'branch': branch

+ -        }

+ -

+ -        METADATA_TEMPLATE = dedent("""\

+ -            [Runtime]

+ -            name={id}

+ -            runtime={runtime_id}/{arch}/{branch}

+ -            sdk={sdk_id}/{arch}/{branch}

+ -

+ -            [Environment]

+ -            LD_LIBRARY_PATH=/app/lib64:/app/lib

+ -            GI_TYPELIB_PATH=/app/lib64/girepository-1.0

+ -            """)

+ -

+ -        with open(os.path.join(builddir, 'metadata'), 'w') as f:

+ -            f.write(METADATA_TEMPLATE.format(**args))

+ -

+ -        runtime_ref = 'runtime/{id}/{arch}/{branch}'.format(**args)

+ -

+ -        subprocess.check_call(['ostree', 'commit',

+ -                               '--repo', repo, '--owner-uid=0',

+ -                               '--owner-gid=0', '--no-xattrs',

+ -                               '--branch', runtime_ref,

+ -                               '-s', 'build of ' + runtime_ref,

+ -                               '--tree=tar=' + tarred_filesystem,

+ -                               '--tree=dir=' + builddir])

+ -        subprocess.check_call(['ostree', 'summary', '-u', '--repo', repo])

+ -

+ -        subprocess.check_call(['flatpak', 'build-bundle', repo,

+ -                               '--oci', '--runtime',

+ -                               outfile, id_, branch])

+ -

+ -        return runtime_ref

+ -

+ -    def _find_runtime_info(self):

+ -        runtime_module = self.source.runtime_module

+ -

+ -        flatpak_xmd = runtime_module.mmd.props.xmd['flatpak']

+ -        runtime_id = flatpak_xmd['runtimes']['runtime']['id']

+ -        sdk_id = flatpak_xmd['runtimes']['runtime'].get('sdk', runtime_id)

+ -        runtime_version = flatpak_xmd['branch']

+ -

+ -        return runtime_id, sdk_id, runtime_version

+ -

+ -    def _create_app_oci(self, tarred_filesystem, outfile):

+ -        info = self.source.flatpak_yaml

+ -        app_id = info['id']

+ -        app_branch = info.get('branch', 'master')

+ -

+ -        builddir = os.path.join(self.workflow.source.workdir, "build")

+ -        os.mkdir(builddir)

+ -

+ -        repo = os.path.join(self.workflow.source.workdir, "repo")

+ -

+ -        runtime_id, sdk_id, runtime_version = self._find_runtime_info()

+ -

+ -        # See comment for build_init() for why we can't use 'flatpak build-init'

+ -        # subprocess.check_call(['flatpak', 'build-init',

+ -        #                        builddir, app_id, runtime_id, runtime_id, runtime_version])

+ -        build_init(builddir, app_id, sdk_id, runtime_id, runtime_version, tags=info.get('tags', []))

+ -

+ -        # with gzip'ed tarball, tar is several seconds faster than tarfile.extractall

+ -        subprocess.check_call(['tar', 'xCfz', builddir, tarred_filesystem])

+ -

+ -        update_desktop_files(app_id, builddir)

+ -

+ -        finish_args = []

+ -        if 'finish-args' in info:

+ -            # shlex.split(None) reads from standard input, so avoid that

+ -            finish_args = shlex.split(info['finish-args'] or '')

+ -        if 'command' in info:

+ -            finish_args = ['--command', info['command']] + finish_args

+ -

+ -        subprocess.check_call(['flatpak', 'build-finish'] + finish_args + [builddir])

+ -        subprocess.check_call(['flatpak', 'build-export', repo, builddir, app_branch])

+ -

+ -        subprocess.check_call(['flatpak', 'build-bundle', repo, '--oci',

+ -                               outfile, app_id, app_branch])

+ -

+ -        app_ref = 'app/{app_id}/{arch}/{branch}'.format(app_id=app_id,

+ -                                                        arch=get_arch(),

+ -                                                        branch=app_branch)

+ -

+ -        return app_ref

+ -

+      def run(self):

+          self.source = get_flatpak_source_info(self.workflow)

+          if self.source is None:

+              raise RuntimeError("flatpak_create_dockerfile must be run before flatpak_create_oci")

+  

+ +        self.builder = FlatpakBuilder(self.source, self.workflow.source.workdir,

+ +                                      'var/tmp/flatpak-build',

+ +                                      parse_manifest=parse_rpm_output)

+ +

+          tarred_filesystem, manifest = self._export_filesystem()

+          self.log.info('filesystem tarfile written to %s', tarred_filesystem)

+          self.log.info('manifest written to %s', manifest)

+  

+ -        all_components = self._get_components(manifest)

+ -        if self.source.runtime:

+ -            image_components = all_components

+ -        else:

+ -            image_components = self._filter_app_manifest(all_components)

+ -

+ -        self.log.info("Components:\n%s",

+ -                      "\n".join("        {name}-{epoch}:{version}-{release}.{arch}.rpm"

+ -                                .format(**c) for c in image_components))

+ -

+ +        image_components = self.builder.get_components(manifest)

+          self.workflow.image_components = image_components

+  

+ -        outfile = os.path.join(self.workflow.source.workdir, 'flatpak-oci-image')

+ -

+ -        if self.source.runtime:

+ -            ref_name = self._create_runtime_oci(tarred_filesystem, outfile)

+ -        else:

+ -            ref_name = self._create_app_oci(tarred_filesystem, outfile)

+ +        ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

+  

+          metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)

+          metadata['ref_name'] = ref_name

+ @@ -459,11 +112,6 @@ class FlatpakCreateOciPlugin(PrePublishPlugin):

+  

+          self.log.info('OCI image is available as %s', outfile)

+  

+ -        tarred_outfile = outfile + '.tar'

+ -        with tarfile.TarFile(tarred_outfile, "w") as tf:

+ -            for f in os.listdir(outfile):

+ -                tf.add(os.path.join(outfile, f), f)

+ -

+          metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)

+          metadata['ref_name'] = ref_name

+          self.workflow.exported_image_sequence.append(metadata)

+ diff --git a/images/dockerhost-builder/Dockerfile b/images/dockerhost-builder/Dockerfile

+ index 11368b9..7e5be3e 100644

+ --- a/images/dockerhost-builder/Dockerfile

+ +++ b/images/dockerhost-builder/Dockerfile

+ @@ -1,5 +1,5 @@

+  FROM fedora:latest

+ -RUN dnf -y install docker git python-docker-py python-setuptools desktop-file-utils e2fsprogs flatpak koji libmodulemd ostree python2-gobject-base python-backports-lzma osbs gssproxy && dnf clean all

+ +RUN dnf -y install docker git python-docker-py python-setuptools desktop-file-utils e2fsprogs flatpak koji libmodulemd ostree python2-gobject-base python2-flatpak-module-tools python-backports-lzma osbs gssproxy && dnf clean all

+  ADD ./atomic-reactor.tar.gz /tmp/

+  RUN cd /tmp/atomic-reactor-*/ && python setup.py install

+  CMD ["atomic-reactor", "--verbose", "inside-build"]

+ diff --git a/images/privileged-builder/Dockerfile b/images/privileged-builder/Dockerfile

+ index a6fa7fd..75653bf 100644

+ --- a/images/privileged-builder/Dockerfile

+ +++ b/images/privileged-builder/Dockerfile

+ @@ -1,5 +1,5 @@

+  FROM fedora:latest

+ -RUN dnf -y install docker git python-docker-py python-setuptools desktop-file-utils e2fsprogs flatpak koji libmodulemd ostree python2-gobject-base python-backports-lzma osbs gssproxy && dnf clean all

+ +RUN dnf -y install docker git python-docker-py python-setuptools desktop-file-utils e2fsprogs flatpak koji libmodulemd ostree python2-gobject-base python2-flatpak-module-tools python-backports-lzma osbs gssproxy && dnf clean all

+  ADD ./atomic-reactor.tar.gz /tmp/

+  RUN cd /tmp/atomic-reactor-*/ && python setup.py install

+  ADD ./docker.sh /tmp/docker.sh

+ diff --git a/requirements-flatpak.txt b/requirements-flatpak.txt

+ index a751b49..acb27ca 100644

+ --- a/requirements-flatpak.txt

+ +++ b/requirements-flatpak.txt

+ @@ -1 +1,2 @@

+ +flatpak-module-tools

+  pdc-client

+ diff --git a/tests/flatpak.py b/tests/flatpak.py

+ index 2ac42e9..c412284 100644

+ --- a/tests/flatpak.py

+ +++ b/tests/flatpak.py

+ @@ -1,11 +1,12 @@

+  import yaml

+  

+ +from atomic_reactor.util import split_module_spec

+ +

+  try:

+ -    from atomic_reactor.plugins.pre_resolve_module_compose import (ModuleInfo,

+ -                                                                   ComposeInfo,

+ +    from atomic_reactor.plugins.pre_resolve_module_compose import (ComposeInfo,

+                                                                     set_compose_info)

+ -    from atomic_reactor.plugins.pre_flatpak_create_dockerfile import (FlatpakSourceInfo,

+ -                                                                      set_flatpak_source_info)

+ +    from atomic_reactor.plugins.pre_flatpak_create_dockerfile import set_flatpak_source_info

+ +    from flatpak_module_tools.flatpak_builder import FlatpakSourceInfo, ModuleInfo

+      from gi.repository import Modulemd

+      MODULEMD_AVAILABLE = True

+  except ImportError:

+ @@ -123,6 +124,9 @@ flatpak:

+      # Test overriding the automatic "first executable in /usr/bin'

+      command: eog2

+      tags: ["Viewer"]

+ +    copy-icon: true

+ +    rename-desktop-file: eog.desktop

+ +    rename-icon: eog

+      finish-args: >

+  """ + "".join("        {}\n".format(a) for a in FLATPAK_APP_FINISH_ARGS)

+  

+ @@ -342,7 +346,10 @@ def setup_flatpak_source_info(workflow, config=APP_CONFIG):

+  

+      flatpak_yaml = yaml.safe_load(config['container_yaml'])['flatpak']

+  

+ -    source = FlatpakSourceInfo(flatpak_yaml, compose)

+ +    module_spec = split_module_spec(compose.source_spec)

+ +

+ +    source = FlatpakSourceInfo(flatpak_yaml, compose.modules, compose.base_module,

+ +                               module_spec.profile)

+      set_flatpak_source_info(workflow, source)

+  

+      return source

+ -- 

+ 2.14.3

+ 
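
For reviewers who have not looked at flatpak-module-tools yet, here is a minimal sketch of the plugin flow after the patch above, written only in terms of the FlatpakSourceInfo and FlatpakBuilder calls that appear in the diff (precheck, get_enable_modules, get_install_packages, get_includepkgs, get_cleanup_script). It is an illustration, not code from either repository; the flatpak_yaml, compose_info and profile inputs are assumed to come from container.yaml and the resolve_module_compose plugin as before, and the helper name dockerfile_inputs is made up for this sketch.

    # Illustrative sketch only -- the real logic lives in
    # pre_flatpak_create_dockerfile.py and flatpak_module_tools.flatpak_builder.
    from flatpak_module_tools.flatpak_builder import FlatpakBuilder, FlatpakSourceInfo

    def dockerfile_inputs(flatpak_yaml, compose_info, profile):
        # FlatpakSourceInfo now takes the module set and base module directly,
        # plus the profile parsed from the module spec (see split_module_spec()).
        source = FlatpakSourceInfo(flatpak_yaml,
                                   compose_info.modules,
                                   compose_info.base_module,
                                   profile)

        # The dockerfile plugin only needs the package/module computations,
        # so it constructs a builder with no workdir or chroot path.
        builder = FlatpakBuilder(source, None, None)
        builder.precheck()  # replaces the open-coded container.yaml/xmd checks

        modules_str = ' '.join(builder.get_enable_modules())
        install_packages_str = ' '.join(builder.get_install_packages())
        includepkgs = builder.get_includepkgs()        # dnf includepkgs filter
        cleanup_script = builder.get_cleanup_script()  # cleanup-commands from container.yaml
        return modules_str, install_packages_str, includepkgs, cleanup_script

The prepub plugin follows the same pattern, but passes the workdir and the 'var/tmp/flatpak-build' root so that _export_from_stream() and build_container() take over the filesystem postprocessing that was previously open-coded in prepub_flatpak_create_oci.py.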

file modified
+13 -2
@@ -33,7 +33,7 @@ 

  

  Name:           %{project}

  Version:        1.6.33

- Release:        2%{?dist}

+ Release:        3%{?dist}

  

  Summary:        Improved builder for Docker images

  Group:          Development/Tools
@@ -44,6 +44,12 @@ 

  # jsonschema version is pinned in requirement.txt to the rhel/centos 7 version

  # we want to use the latest from Fedora repos.

  Patch0:         atomic-reactor-jsonschema-version.patch

+ # https://github.com/projectatomic/atomic-reactor/pull/1048

+ Patch1:         flatpak_create_oci-Fix-docker-compatibility-issues.patch

+ # https://github.com/projectatomic/atomic-reactor/pull/1046

+ Patch2:         Clean-up-platform-handling-and-restrict-platforms-by.patch

+ # https://github.com/projectatomic/atomic-reactor/pull/1052

+ Patch3:         Move-to-using-flatpak-module-tools-as-a-library.patch

  

  BuildArch:      noarch

  
@@ -227,7 +233,7 @@ 

  

  

  %prep

- %autosetup -n %{name}-%{commit}

+ %autosetup -p1 -n %{name}-%{commit}

  

  %build

  %py2_build
@@ -407,6 +413,11 @@ 

  

  

  %changelog

+ * Wed Aug  1 2018 Owen Taylor <otaylor@redhat.com> - 1.6.33-3

+  - Add patch to fix compatibility issues with recent docker/docker-py

+  - Add patch to drop platforms from target tag that aren't in the cluster config

+  - Add patch to use Flatpak code from flatpak-module-tools, rather than cut-and-paste

+ 

  * Tue Jul 31 2018 Clement Verna <cverna@fedoraproject.org> - 1.6.33-2

  - Add patch to manage jsonschema version

  

@@ -0,0 +1,126 @@ 

+ From 08be0466a4308f085bc1255a8f7465a0763cdc23 Mon Sep 17 00:00:00 2001

+ From: "Owen W. Taylor" <otaylor@fishsoup.net>

+ Date: Wed, 18 Jul 2018 15:46:29 -0400

+ Subject: [PATCH 1/3] flatpak_create_oci: Fix docker compatibility issues

+ 

+ https://github.com/projectatomic/atomic-reactor/pull/1048

+ 

+ pre_flatpak_create_dockerfile.py: provide a command when creating container

+ 

+ With recent versions of the Docker daemon and/or particular base images,

+ creating the container to export the filesystem for Flatpak creation fails

+ because no command is provided. Provide a dummy command.

+ 

+ Handle generator returned by docker.ApiClient.export()

+ 

+ Current versions of the Python docker client return a generator from

+ ApiClient.export() rather than a file-like object. Wrap this in

+ a converter class to pass it to tarfile.

+ ---

+  .../plugins/prepub_flatpak_create_oci.py           | 44 +++++++++++++++++++++-

+  tests/plugins/test_flatpak_create_oci.py           | 14 ++++++-

+  2 files changed, 54 insertions(+), 4 deletions(-)

+ 

+ diff --git a/atomic_reactor/plugins/prepub_flatpak_create_oci.py b/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ index b4ef704..b86e792 100644

+ --- a/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ +++ b/atomic_reactor/plugins/prepub_flatpak_create_oci.py

+ @@ -119,6 +119,41 @@ def update_desktop_files(app_id, builddir):

+                                                      basename)))

+  

+  

+ +# This converts the generator provided by the export() operation to a file-like

+ +# object with a read that we can pass to tarfile.

+ +class StreamAdapter(object):

+ +    def __init__(self, gen):

+ +        self.gen = gen

+ +        self.buf = None

+ +        self.pos = None

+ +

+ +    def read(self, count):

+ +        pieces = []

+ +        remaining = count

+ +        while remaining > 0:

+ +            if not self.buf:

+ +                try:

+ +                    self.buf = next(self.gen)

+ +                    self.pos = 0

+ +                except StopIteration:

+ +                    break

+ +

+ +            if len(self.buf) - self.pos < remaining:

+ +                pieces.append(self.buf[self.pos:])

+ +                remaining -= (len(self.buf) - self.pos)

+ +                self.buf = None

+ +                self.pos = None

+ +            else:

+ +                pieces.append(self.buf[self.pos:self.pos + remaining])

+ +                self.pos += remaining

+ +                remaining = 0

+ +

+ +        return b''.join(pieces)

+ +

+ +    def close(self):

+ +        pass

+ +

+ +

+  class FlatpakCreateOciPlugin(PrePublishPlugin):

+      key = 'flatpak_create_oci'

+      is_allowed_to_fail = False

+ @@ -195,7 +230,9 @@ class FlatpakCreateOciPlugin(PrePublishPlugin):

+          outfile = os.path.join(self.workflow.source.workdir, 'filesystem.tar.gz')

+          manifestfile = os.path.join(self.workflow.source.workdir, 'flatpak-build.rpm_qf')

+  

+ -        export_stream = self.tasker.d.export(container_id)

+ +        export_generator = self.tasker.d.export(container_id)

+ +        export_stream = StreamAdapter(export_generator)

+ +

+          out_fileobj = open(outfile, "wb")

+          compress_process = subprocess.Popen(['gzip', '-c'],

+                                              stdin=subprocess.PIPE,

+ @@ -261,7 +298,10 @@ class FlatpakCreateOciPlugin(PrePublishPlugin):

+      def _export_filesystem(self):

+          image = self.workflow.image

+          self.log.info("Creating temporary docker container")

+ -        container_dict = self.tasker.d.create_container(image)

+ +        # The command here isn't used, since we only use the container for export,

+ +        # but (in some circumstances) the docker daemon will error out if no

+ +        # command is specified.

+ +        container_dict = self.tasker.d.create_container(image, command=["/bin/bash"])

+          container_id = container_dict['Id']

+  

+          try:

+ diff --git a/tests/plugins/test_flatpak_create_oci.py b/tests/plugins/test_flatpak_create_oci.py

+ index 2e49309..a8c2590 100644

+ --- a/tests/plugins/test_flatpak_create_oci.py

+ +++ b/tests/plugins/test_flatpak_create_oci.py

+ @@ -721,14 +721,24 @@ def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage, mock_f

+  

+      export_stream = open(filesystem_tar, "rb")

+  

+ +    def stream_to_generator(s):

+ +        while True:

+ +            # Yield small chunks to test the StreamAdapter code better

+ +            buf = s.read(100)

+ +            if len(buf) == 0:

+ +                return

+ +            yield buf

+ +

+ +    export_generator = stream_to_generator(export_stream)

+ +

+      (flexmock(docker_tasker.d.wrapped)

+       .should_receive('create_container')

+ -     .with_args(workflow.image)

+ +     .with_args(workflow.image, command=["/bin/bash"])

+       .and_return({'Id': CONTAINER_ID}))

+      (flexmock(docker_tasker.d.wrapped)

+       .should_receive('export')

+       .with_args(CONTAINER_ID)

+ -     .and_return(export_stream))

+ +     .and_return(export_generator))

+      (flexmock(docker_tasker.d.wrapped)

+       .should_receive('remove_container')

+       .with_args(CONTAINER_ID))

+ -- 

+ 2.14.3

+ 
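
The StreamAdapter in the patch above is easiest to understand from its call site: recent docker-py returns a generator of byte chunks from export(), while tarfile's streaming mode only needs an object with read(). The standalone sketch below shows the same pattern with an in-memory tar standing in for the docker export stream; it is a condensed illustration of the idea, not the exact class from the patch.

    import io
    import tarfile

    class StreamAdapter(object):
        # Wrap a generator of byte chunks in a read()-able object for tarfile.
        def __init__(self, gen):
            self.gen = gen
            self.buf = b''

        def read(self, count):
            pieces = []
            remaining = count
            while remaining > 0:
                if not self.buf:
                    try:
                        self.buf = next(self.gen)
                    except StopIteration:
                        break
                piece, self.buf = self.buf[:remaining], self.buf[remaining:]
                pieces.append(piece)
                remaining -= len(piece)
            return b''.join(pieces)

    # A small tar archive in memory stands in for the docker export stream.
    raw = io.BytesIO()
    with tarfile.open(fileobj=raw, mode='w') as tf:
        data = b'hello'
        info = tarfile.TarInfo('var/tmp/flatpak-build/hello.txt')
        info.size = len(data)
        tf.addfile(info, io.BytesIO(data))

    def chunked(data, size=100):
        # Mimics the export() generator: yields the tar in small chunks.
        for i in range(0, len(data), size):
            yield data[i:i + size]

    # tarfile's streaming mode ('r|') only calls read(), which the adapter provides.
    with tarfile.open(fileobj=StreamAdapter(chunked(raw.getvalue())), mode='r|') as tf:
        print([member.name for member in tf])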

Add patch to fix compatibility issues with recent docker/docker-py [merged upstream]
Add patch to drop platforms from target tag that aren't in the cluster config
Add patch to use Flatpak code from flatpak-module-tools, rather than cut-and-paste

Pull-Request has been merged by cverna 5 years ago.