#!/usr/bin/env python3

import tempfile
import subprocess
from pathlib import Path
from typing import Optional, List
from dataclasses import dataclass
import hashlib
from ftplib import FTP
from urllib.parse import urlparse
from textwrap import dedent
import sys
import os
import json
import time
import argparse
import shutil
from packaging.version import parse as parse_version
import requests
from lxml import etree


CACHE_LIMIT = 3600 * 24
PYTHON_DEP_OVERRIDE = {
    "pygments": {
        "build-commands": [
            # pygments is already in the freedesktop SDK, so we need the --ignore-installed flag
            'pip3 install --ignore-installed --exists-action=i --no-index --find-links='
            '"file://${PWD}" --prefix=${FLATPAK_DEST} . --no-build-isolation'
        ]
    }
}
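# Keys suffixed with "_extend" extend the generated module data instead of
# replacing it (see get_python_deps). A hypothetical override adding a local
# patch could look like:
#     "some_package": {
#         "sources_extend": [
#             # sha256 is computed automatically for local "file"/"patch" sources
#             {"type": "patch", "path": "some_package_fix.patch"}
#         ]
#     }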
PYTHON_DEP_PREINSTALL = [
    "wheel",
]
PYTHON_DEP_REQUIREMENTS_UPDATE = {
    # these modules are not detected as Twisted requirements, so they're sometimes misplaced
    'twisted': ['service-identity', 'pyopenssl'],
}
PYTHON_SETUP_REQUIREMENTS = [
    # needed for cryptography
    'toml',
    'semantic_version',

    'setuptools_scm',
    'docutils',  # needed by m2r
    'mistune',  # needed by m2r
    'm2r',  # needed by automat
]
DEFAULT_MANIFEST = {
    "runtime": "org.freedesktop.Platform",
    "runtime-version": "20.08",
    "sdk": "org.freedesktop.Sdk",
    "command": "libervia_wrapper",
    "finish-args": [
        "--socket=session-bus",
        "--share=network",
        "--filesystem=home"
    ],
}
SHOW_REQUIRES_HEADER = 'Requires: '
SHOW_VERSION_HEADER = 'Version: '
SETTINGS_KEY = '_build_settings'
APPDATA_RELEASE_DEV_TEXT = dedent("""\
    This is a development version, used as a preview.
    Please note that it is incomplete and it probably contains bugs.
    """)
OVERWRITE_WARNING = "{} already exists, do you want to overwrite it (y/N)? "

# packages already installed in flatpak runtime
PYTHON_DEPS_IGNORE = ("setuptools", "cython", "six", "markdown")
PYTHON_VERSION_FORCE = {
    # we have to restrict idna due to requests' "idna<3" requirement
    # (as of requests 2.25.1)
    # TODO: check if this requirement is still there
    'idna': '2.10',
    # next versions need RUST
    # TODO: upgrade
    'cryptography': '3.3.2',
}


PIP_CMD = (
    'pip3 install --exists-action=i --no-index --find-links="file://${{PWD}}" '
    '--prefix=${{FLATPAK_DEST}} {package} --no-build-isolation'
)
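# With package=".", PIP_CMD renders to (the doubled braces escape str.format):
#   pip3 install --exists-action=i --no-index --find-links="file://${PWD}" \
#       --prefix=${FLATPAK_DEST} . --no-build-isolation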

pkg_repos = {}
python_setup_requirements = PYTHON_SETUP_REQUIREMENTS.copy()


@dataclass
class Package:
    name: str
    version: str
    hash_: str
    url: str
    requirements: List[str]
    repos: Optional[dict]

    def __hash__(self):
        return hash(self.name)

    def __eq__(self, other):
        return self.name == other.name
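
# Note: Package instances hash and compare by name only, so e.g. (illustrative
# values):
#   Package("idna", "2.10", "", "", [], None) == Package("idna", "3.0", "", "", [], None)
# is True; this lets list.remove() and membership tests match packages by name.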


def print_step(step):
    print(
        "\n"
        f"┌{'─' * (len(step) + 2)}\n"
        f"│ {step}\n"
        f"└{'─' * (len(step) + 2)}\n"
        "\n"
    )


def parse_args():
    parser = argparse.ArgumentParser(
        description='Flatpak manifest builder for Libervia')

    build_group = parser.add_argument_group('building', 'options used to generate the manifest')
    export_group = parser.add_argument_group('export', 'options used to export build files')

    # build group
    build_group.add_argument('-f', '--force', action="store_true",
                        help="force overwriting of existing manifest and appdata files")
    build_group.add_argument('--ignore-cache', action='append', default=[],
                        help='ignore the cache of this step ("all" to ignore all caches)')
    build_group.add_argument(
        '--deps-dir',
        help="use this directory to build_group python dependencies (it won't be deleted at "
             "the end, useful when you want to re-use it and avoir re-downloading)")
    # build_group.add_argument('--no-binary', help="don't use binary packages")

    # export group
    export_group.add_argument('-s', '--symlink', choices=['no', 'all', 'cache'],
        default='no',
        help='"no" to copy all files, "all" to symlink all files, "cache" to copy files '
             'and symlink only cache (default: "no")')
    export_group.add_argument('-F', '--force-export', action='store_true',
        help='force overwriting of existing files/symlinks when exporting (DEFAULT: '
             'existing files are skipped)')
    export_group.add_argument('-e', '--export', type=Path,
        help="export build files to this directory (DEFAULT: don't export files)")

    # common
    parser.add_argument('name', type=str, help="name of the package to build")
    parser.add_argument('version', type=str, help="version of the package to build")

    args = parser.parse_args()
    # FIXME: no_binary is currently not managed because version parser doesn't handle
    #        wheels
    args.no_binary = True

    if 'all' in args.ignore_cache and len(args.ignore_cache) != 1:
        parser.error('"all" cannot be used with step names in --ignore-cache, '
                     'use it alone')

    if args.export is not None and not args.export.is_dir():
        parser.error(f"{args.export} is not a directory!")

    return args
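
# Example invocation (illustrative: package name, version and paths are placeholders):
#   python3 build_manifest.py Libervia dev --deps-dir /tmp/deps -e ./export -s cache
# would generate the manifest for org.libervia.Libervia at the dev version and
# export the build files to ./export, symlinking only the cache.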


## helper functions ##

def get_cache(name):
    """Retrieve cache for a step

    cache will be ignored if too old, or explicitly ignored by user
    @param name(str): name of the step
    @return (object): cached data
    """
    # name of the cache without its suffix, mainly used for python_deps
    shortname = name.split('__', 1)[0]

    if shortname in args.ignore_cache or 'all' in args.ignore_cache:
        print(f"ignoring cache for {shortname}")
        if shortname in args.ignore_cache:
            args.ignore_cache.remove(shortname)
        return None
    try:
        cache_name = cache[name]
        cache_updated = cache_name['updated']
    except KeyError:
        return None
    if time.time() - cache_updated > CACHE_LIMIT:
        print(f"cache for {name} is too old, we won't use it")
        return None
    print(f"cache found for {shortname}")
    return cache_name['data']


def set_cache(name, data):
    """Update cache for a step

    @param name(str): name of the step
    @param data(object): data to cache
    """
    cache_name = cache.setdefault(name, {})
    cache_name['updated'] = time.time()
    cache_name['data'] = data
    with cache_path.open('w') as f:
        json.dump(cache, f)


def get_python_package(package_name, step_name=None, step_message=None, with_pre=False,
                       version=None):
    """Generate module for a Python package

    @param package_name(str, list[str]): name of the Python package
        use list of str to get several packages at once.
        A (name, version) tuple (or any size 2 iterable) can be used to force version of a
        package.
        If package is a list with iterables, it will be modified in place to keep only the
        name as string. This is because version is not needed anymore after that, but
        name is still used to remove dependencies already installed with setup
        requirements.
    @param step_name(None, str): name of the step, None to use package_name
    @param step_message(None, str): message of the step (None to use default one)
        use empty string to disable it
    @param with_pre(bool): if True, also retrieve pre-releases
    @param version(str, None): indicate the version of the package to download
        if None, the most recent version compatible with `with_pre` will be used
    """
    single = isinstance(package_name, str)
    if step_name is None:
        step_name = package_name if single else ' - '.join(package_name)
    if step_message != "":
        print_step(step_message or f"retrieving latest version of {package_name}")
    cache = get_cache(step_name)
    if cache is not None:
        return cache
    package_names = [package_name] if single else package_name
    data = []

    for idx, name in enumerate(package_names):
        if not isinstance(name, str):
            if version is not None:
                raise ValueError(
                    "forced version can't be used with packages versions (i.e. tuples)")
            name, forced_version = name
            package_names[idx] = name
        else:
            forced_version = version
        r = requests.get(f"https://pypi.org/pypi/{name}/json")
        r.raise_for_status()

        releases_data = []
        for version_raw, release_data in r.json()["releases"].items():
            if forced_version is not None and forced_version != version_raw:
                continue
            release_version = parse_version(version_raw)
            if release_version.is_prerelease and not with_pre:
                continue
            releases_data.append((release_version, release_data))

        # we sort releases by version to be sure to have the latest one
        releases_data.sort(key=lambda d: d[0])

        try:
            release_version, release_data = releases_data[-1]
            package = next(r for r in release_data if r["packagetype"] == "sdist")
        except (IndexError, StopIteration):
            raise RuntimeError(f"Can't find a matching package for {name}")

        print(f"{name} {release_version} will be used")
        dep_data = {
            "name": name,
            "buildsystem": "simple",
            "build-commands": [
                PIP_CMD.format(package=".")
            ],
            "sources": [
                {
                    "type": "archive",
                    "url": package["url"],
                    "sha256": package["digests"]["sha256"],
                }
            ],
        }
        data.append(dep_data)

    set_cache(step_name, data)
    return data


def file_upload(filename, dest="/app/bin", src=None, replace=None, use_install=False):
    """Generate manifest modules to upload a local file

    @param filename(str): name of the local file to upload
        Note that it will be overwritten if replace is used
    @param dest(str, Path): path where the file must be installed in the container
    @param src(str, None): when replace is used, name of the source file
    @param replace(dict, None): mapping of text to replace
        Use when the file must be dynamically adapted to the container; note that src
        must be set to the name of the template file if this is used.
        e.g. {'##COMMAND##': 'cagou'} will replace all occurrences of '##COMMAND##' in
        filename by 'cagou'
    @param use_install(bool): if True, install file with `install src dest`
        else, use `mv src dest`
    @return (list(dict)): modules to add to the manifest
    """
    assert isinstance(filename, str) and '/' not in filename
    print_step(f"generating {filename} module")
    # no cache for this one, to be sure to always have the latest version
    filename = Path(filename)
    if src is None:
        file_to_test = filename
    else:
        src = Path(src)
        file_to_test = src
    if not file_to_test.exists():
        raise RuntimeError(
            f"{file_to_test} is missing, it is needed to build the manifest!")

    if replace:
        if src is None:
            raise ValueError(
                '"src" must be set to name of template file if replace is used')
        print(f'doing replacement in template "{src}" to dest "{filename}"')
        # there are strings to replace, we read file, do the changes and dump the
        # result in <filename>
        with open(src) as f:
            buff = f.read()

        for old, new in replace.items():
            buff = buff.replace(old, new)

        with open(filename, 'w') as f:
            f.write(buff)
    else:
        if src is not None:
            raise ValueError('"src" must not be used if replace is not used')

    with filename.open('rb') as f:
        hash_ = hashlib.sha256(f.read()).hexdigest()

    dest = Path(dest)

    dep_data = {
        "name": str(filename),
        "buildsystem": "simple",
        "build-commands": [
        ],
        "sources": [
            {
                "type": "file",
                "path": str(filename),
                "sha256": hash_,
            }
        ]
    }

    build_commands = dep_data['build-commands']
    if use_install:
        build_commands.append(f"install -Dv {filename} {dest}")
    else:
        if dest.as_posix() not in ('/app', '/app/bin'):
            # just in case the destination directory doesn't exist
            build_commands.append(f"mkdir -p {dest.parent}")
        build_commands.append(f"mv -v {filename} {dest}")

    return [dep_data]


def file_from_url(url, dest=None, step_name=None, step_message=None):
    """Generate manifest modules for a file either on a http(s) url or local

    @param url(str): url of the file to use, or local path
        if it starts with http, url will be used, else file_upload(url) will be used
    @param dest(str, None): path where the file should be copied
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        return file_upload(url)
    if not parsed.scheme.startswith('http'):
        raise NotImplementedError(
            f'we can\'t use the URL "{url}", the scheme "{parsed.scheme}" is not managed')

    filepath = Path(parsed.path)
    stem = filepath.stem

    if step_name is None:
        # we use this name to easily ignore cache (with stem) while avoiding
        # conflict if we have 2 URLs with the same stem
        step_name = f"{stem}__{url}"

    if step_message is None:
        step_message = f"generating module for {stem}"

    print_step(step_message)
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    r = requests.get(url)
    r.raise_for_status()
    file_hash = hashlib.sha256(r.content).hexdigest()

    dep_data = {"name": stem}

    if dest is not None:
        dest = Path(dest)
        dep_data.update({
            'buildsystem': 'simple',
            'build-commands':[
                f'mkdir -p {dest.parent}',
                f'mv "{filepath.name}" "{dest}"',
            ]
        })

    dep_data['sources'] = [
        {
            "type": "file",
            "url": url,
            "sha256": file_hash,
        }
    ]

    data = [dep_data]
    set_cache(step_name, data)
    return data


def get_requirements(pip, package_name, done=None, requirements=None, tree=None, indent=0):
    """Retrieve requirements for an installed python package

    @param pip(Path): path to pip executable to use
        package must be installed in the environment of this pip
    @param package_name(str): name of the package to retrieve
    @return(list[(str, str, list[str])]): ordered requirements as (name, version, requirements) tuples
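        e.g. (hypothetical package names and versions):
            [('zope-interface', '5.2.0', []),
             ('twisted', '21.2.0', ['zope-interface', ...]),
             ('libervia-backend', '0.8.0', ['twisted', ...])]
        dependencies come first and the main package comes last (get_python_deps
        relies on this ordering)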
    """
    print(f"found dependency: {package_name}")
    if requirements is None:
        assert done is None
        assert tree is None
        if package_name != canonical(package_name):
            raise ValueError("main package name is not canonical")
        tree = []
        requirements = []
        done = []
        main = True
    else:
        main = False
    done.append(package_name)
    show_cplted = subprocess.run([pip,  "show", package_name],
                                 capture_output=True, text=True)
    show_cplted.check_returncode()

    lines = show_cplted.stdout.split('\n')
    version_raw = next(l for l in lines if l.startswith(SHOW_VERSION_HEADER))
    version = version_raw[len(SHOW_VERSION_HEADER):]
    requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER))
    requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):]
    requirements_canon = [canonical(p) for p in requirement_raw.split(',') if p.strip()]
    requirements_update = [
        canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])]
    new_requirements = set(requirements_update).difference(requirements_canon)
    if new_requirements:
        print("adding extra requirements to {}: {}".format(
            package_name, ', '.join(new_requirements)))
        requirements_canon.extend(new_requirements)
    tree.append(f"{'    '*indent}{package_name} ({version})")
    for r in requirements_canon:
        if r not in done:
            get_requirements(pip, r, done, requirements, tree, indent+1)
    req_tuple = (package_name, version, requirements_canon)
    requirements.append(req_tuple)
    if main:
        print("\ndependency tree:\n")
        print('\n'.join(tree))
    return requirements


def get_hg_id_date(path):
    """Get short identifier and date of current commit from given Mercurial repository

    The revision is retrieved with `hg id`; a "+" is appended after the short rev if
    the working directory has been modified.
    @param path(str, Path): path to the repository
    @return(list[str]): found short revision and ISO date
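        e.g. ['a213053a03be+', '2023-01-17 14:13 +0100'] (illustrative values, the
        "+" marking a locally modified working directory)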
    """
    hg_cplted = subprocess.run(
        ["hg",  "id", "--template", "{id|short}{dirty}\n{date|isodate}", path],
        capture_output=True, text=True)
    hg_cplted.check_returncode()
    return hg_cplted.stdout.split('\n')


def get_cache_dir():
    """Return path to directory to use for cache"""
    return Path(f"cache_{app_id}")


def canonical(name):
    """Get canonical name of a package"""
    return name.lower().replace('_', '-').strip()
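
# e.g. (illustrative calls):
#   canonical("Service_Identity") -> "service-identity"
#   canonical(" PyOpenSSL ")      -> "pyopenssl"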


modules = []
cache_path = Path.home() / Path('.cache/sat_flatpak_cache.json')
if not cache_path.exists():
    cache = {}
else:
    with cache_path.open() as f:
        cache = json.load(f)


## steps ##

def get_libxslt():
    """Generate manifest module to install libxslt (needed for lxml)"""
    step_name = 'libxslt'
    print_step("retrieving latest version of libxslt")
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    ftp = FTP("xmlsoft.org")
    ftp.login()
    ftp.cwd("libxml2")
    libxslt_archives = [l for l in ftp.nlst() if 'xslt' in l
                        and l.endswith('tar.gz')
                        and 'git' not in l
                        and 'rc' not in l]
    latest_libxslt = libxslt_archives[-1]
    print(f"latest libxslt found: {latest_libxslt}")

    with tempfile.TemporaryDirectory() as tmp_dirname:
        tmp_dir = Path(tmp_dirname)
        file_path = tmp_dir / latest_libxslt
        with file_path.open('wb+') as f:
            ftp.retrbinary('RETR ' + latest_libxslt, f.write)
            f.seek(0)
            libxslt_hash = hashlib.sha256(f.read()).hexdigest()

    ftp.quit()

    print(f"libxstl hash: {libxslt_hash}")

    data = [{
        "name": "libxslt",
        "sources": [
            {
                "type": "archive",
                "url": f"ftp://xmlsoft.org/libxml2/{latest_libxslt}",
                "sha256": libxslt_hash,
            }
        ]
    }]
    set_cache(step_name, data)
    return data


def get_python_deps():
    """Generate manifest modules for python dependencies of main package"""
    step_name = f'python_deps__{app_id}'
    print_step("retrieving python dependencies")
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    with tempfile.TemporaryDirectory() as tmp_dirname:
        if args.deps_dir is not None:
            # we ignore the created temporary directory if another one is specified
            tmp_dirname = args.deps_dir
        tmp_dir = Path(tmp_dirname)
        env_dir = tmp_dir / 'env'
        pip = env_dir / 'bin' / 'pip'
        if env_dir.exists():
            print("packages are already installed")
        else:
            print(f"working in temporary directory {tmp_dirname}")
            venv_cplted = subprocess.run(["/usr/bin/env", "python3", "-m", "venv", env_dir])
            venv_cplted.check_returncode()
            print("\ninstalling package\n")
            if PYTHON_DEP_PREINSTALL:
                print("preinstalling packages")
                for dep in PYTHON_DEP_PREINSTALL:
                    inst_cplted = subprocess.run([pip, "install", dep])
                    inst_cplted.check_returncode()

            # we install the package to have an accurate map of requirements, as PyPI
            # metadata are incomplete. Packages should be retrieved from pip's cache
            if isinstance(main_package_source, Path):
                inst_cplted = subprocess.run(
                    [pip, "install", "--progress-bar", "emoji", "-r", "requirements.txt"],
                    cwd = main_package_source
                )
            else:
                inst_cplted = subprocess.run(
                    [pip, "install", "--progress-bar", "emoji", main_package_source])
            inst_cplted.check_returncode()


        print("checking package coming from VCS")
        freeze_cplted = subprocess.run(
            [pip,  "freeze"], capture_output=True, text=True
        )
        freeze_cplted.check_returncode()
        lines = freeze_cplted.stdout.split('\n')
        for line in lines:
            if " @ " in line:
                pkg, reference = line.split(" @ ")
                pkg = canonical(pkg)
                if pkg == main_package:
                    continue
                if reference.startswith("hg+http"):
                    url = reference[3:].split('@')[0]
                    pkg_repos[pkg] = {"type": "hg", "url": url}
                    print(f"found {pkg!r} from {url!r} (Mercurial)")

        print_step("analyzing python dependencies")
        deps_map = {}

        requirements = get_requirements(pip, main_package)


        print("\nretrieveing metadata on PyPi\n")

        if not args.no_binary:
            print(r"/!\ retrieving binary packages is not currently supported")

        deps = []
        # deps coming from a repository are put at the end to make rebuilding quicker
        repos_deps = []
        for name, version, pkg_requirements in requirements:
            print(f"{name}: ", end='')
            if name in PYTHON_DEPS_IGNORE:
                print(f"{name!r} ignored")
                continue
            if name in pkg_repos:
                # main_package must be in normal dependencies
                target_deps = repos_deps if name != main_package else deps
                repos = pkg_repos[name]
                dep_hash = ""
                url = ""
                source_release = {"packagetype": f"VCS ({repos['type']})"}
            else:
                target_deps = deps
                r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json")
                r.raise_for_status()
                dep_json = r.json()
                release_json = dep_json["releases"][version]
                source_release = next(
                    r for r in release_json if r['packagetype'] == 'sdist'
                )
                dep_hash = source_release['digests']['sha256']
                url = source_release['url']
                repos = None
            dep = Package(
                name=name,
                version=version,
                hash_=dep_hash,
                url=url,
                requirements=pkg_requirements,
                repos=repos,
            )
            target_deps.append(dep)
            deps_map[name] = dep
            print(f"version: {version}, type: {source_release['packagetype']}")

    # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS
    for name in python_setup_requirements:
        if not isinstance(name, str):
            # this is a package with version in a list, first item is the name
            name = name[0]
        package_data = deps_map.get(name)
        if package_data is not None:
            deps.remove(package_data)
            print(f"removed {name} which is already a setup requirement")

    # main package is installed at the end of the manifest
    # so we remove it (it is the last package in deps)
    main_pack = deps.pop()
    assert main_pack.name == main_package

    print("\nwe now generate modules for python dependencies\n")
    data = []
    version_force = {
        canonical(n): v
        for n,v in build_settings.get('python_deps_version_force', {}).items()
    }

    for dep in deps + repos_deps:
        version_forced = version_force.get(dep.name)
        if version_forced == 'pre-release':
            print(f"using pre-release version for {dep.name} as requested in build "
                  "settings")
            dep_data = get_python_package(dep.name, step_message='', with_pre=True)[0]
        elif version_forced is not None:
            print(f"using version {version_forced} for {dep.name} as requested in "
                  "build settings")
            dep_data = get_python_package(dep.name, step_message='',
                                          version=version_forced)[0]
        elif dep.repos is not None:
            package_source = cache_from_repos(dep.name, dep.repos)
            dep_data = get_repos_module(dep.name, package_source)
        else:
            dep_data = {
                "name": dep.name,
                "buildsystem": "simple",
                "build-commands": [
                    PIP_CMD.format(package=".")
                ],
                "sources": [
                    {
                        "type": "archive",
                        "url": dep.url,
                        "sha256": dep.hash_,
                    }
                ],
            }

        if dep.name in PYTHON_DEP_OVERRIDE:
            print(f"applying override for {dep.name}")
            override = PYTHON_DEP_OVERRIDE[dep.name]

            # keys suffixed with "_extend" extend the data instead of replacing it
            for key in list(override.keys()):
                if key.endswith('_extend'):
                    real_key = key[:-7]
                    extend_data = override.pop(key)
                    if real_key == 'sources':
                        for item_data in extend_data:
                            # we look for missing hashes and add them
                            if (item_data.get('type') in ('file', 'patch')
                                and 'sha256' not in item_data):
                                with open(item_data['path'], 'rb') as f:
                                    hash_ = hashlib.sha256(f.read()).hexdigest()
                                item_data['sha256'] = hash_

                    dep_data.setdefault(real_key, []).extend(extend_data)

            dep_data.update(override)

        data.append(dep_data)

    set_cache(step_name, data)
    return data


def cache_from_repos(name, dev_repos):
    """Get and cache locally a repository and returns cache path"""
    print(f"retrieving code from repository {dev_repos!r}")
    repos_type = dev_repos.get('type', 'hg')
    if repos_type != 'hg':
        raise NotImplementedError("only Mercurial is managed so far")
    url = dev_repos['url']
    dest_path = get_cache_dir() / name
    dest_path.mkdir(parents=True, exist_ok=True)
    if (dest_path / '.hg').is_dir():
        print("code is already in cache, updating")
        hg_cplted = subprocess.run(["hg",  "pull", "-u", "--cwd", dest_path])
        hg_cplted.check_returncode()
    else:
        try:
            print(f"retrieving code from repository {url} into {dest_path}")
            hg_cplted = subprocess.run(["hg",  "clone", url, dest_path])
            hg_cplted.check_returncode()
        except Exception as e:
            shutil.rmtree(dest_path)
            raise e
    return dest_path


def get_repos_module(package, source):
    """Generate manifest module for a locally downloaded repository"""
    dep_data = {
        "name": package,
        "buildsystem": "simple",
        "build-commands": [
            PIP_CMD.format(package=".")
        ],
        "sources": [
            {
                "type": "dir",
                "path": str(source)
            }
        ],
    }
    return dep_data


def get_sat_media():
    """Generate module for last version of sat_media available on the FTP"""
    step_name = 'sat_media'
    print_step("retrieving latest version of sat_media")
    cache = get_cache(step_name)
    if cache is not None:
        return cache
    url = "https://ftp.goffi.org/sat_media/sat_media.tar.bz2"
    r = requests.get(url)
    r.raise_for_status()
    hash_ = hashlib.sha256(r.content).hexdigest()
    dep_data = {
        "name": "sat-media",
        "buildsystem": "simple",
        "build-commands": [
            "cp -vr . ${FLATPAK_DEST}/share/sat-media"
        ],
        "sources": [
            {
                "type": "archive",
                "url": url,
                "sha256": hash_,
            }
        ],
    }

    data = [dep_data]
    set_cache(step_name, data)
    return data


def get_icon():
    icon = build_settings.get('icon')
    if icon is None:
        return []
    else:
        if isinstance(icon, str):
            icon = {'url': icon}
        icon_path = Path(urlparse(icon['url']).path)
        suffix = icon_path.suffix[1:]
        if suffix not in ('svg', 'png'):
            raise ValueError("invalid icon, you must use a SVG or PNG image!")
        if 'size' not in icon:
            if suffix == 'svg':
                icon['size'] = 'scalable'
            else:
                raise ValueError('icon size is not specified, please add a "size" key')

        dest_path = f"/app/share/icons/hicolor/{icon['size']}/apps/{app_id}.{suffix}"

        data = file_from_url(
            url = icon['url'],
            dest = dest_path,
            step_name = f"icon__{app_id}",
            step_message = "retrieving application icon",
        )
        data[0]['name'] = 'icon'
        return data


def generate_appdata_from_template(template_file):
    appdata_file = Path(f"{app_id}.appdata.xml")
    if appdata_file.exists() and not args.force:
        confirm = input(OVERWRITE_WARNING.format(appdata_file))
        if confirm != 'y':
            print("manifest building cancelled")
            sys.exit(0)
    parser = etree.XMLParser(remove_blank_text=True)
    tree = etree.parse(template_file, parser)
    root = tree.getroot()
    if args.version == 'dev':
        print(f"addind release data for dev version ({dev_version_rev})")
        releases_elt = root.find('releases')
        if releases_elt is None:
            raise ValueError(
                "<releases/> element is missing in appdata template, please add it")
        release_elt = etree.Element(
            "release",
            {'type': 'development',
             'version': dev_version_rev,
             'date': dev_version_date},
        )
        releases_elt.insert(0, release_elt)
        description_elt = etree.SubElement(release_elt, 'description')
        text_lines = APPDATA_RELEASE_DEV_TEXT.strip().split('\n')
        for text in text_lines:
            etree.SubElement(description_elt, 'p').text = text

    with open(appdata_file, "wb") as f:
        f.write(etree.tostring(root, encoding="utf-8", xml_declaration=True,
                               pretty_print=True))

    return appdata_file.as_posix()
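
# For a dev build, the <release/> element inserted above looks like this
# (illustrative revision and date):
#   <release type="development" version="a213053a03be" date="2023-01-17 14:13 +0100">
#     <description>
#       <p>This is a development version, used as a preview.</p>
#       <p>Please note that it is incomplete and it probably contains bugs.</p>
#     </description>
#   </release>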


def get_app_metadata():
    desktop_file = build_settings.get('desktop_file')
    appdata_file = build_settings.get('appdata_file')
    if desktop_file is None and appdata_file is None:
        return

    print_step("retrieving application metadata")
    # we don't use cache here, to be sure to always have up-to-date files

    data = []

    if desktop_file is not None:
        print("generating module for desktop metadata")
        data.extend(file_upload(
            filename = desktop_file,
            dest=f"/app/share/applications/{app_id}.desktop",
            ))

    if appdata_file is not None:
        if appdata_file.startswith('_tpl_'):
            print("found appdata template, we now use it to generate the file")
            appdata_file = generate_appdata_from_template(appdata_file)
        print("generating module for appdata metadata")
        data.extend(file_upload(
            filename = appdata_file,
            dest=f"/app/share/metainfo/{app_id}.appdata.xml",
            ))

    return data


## main_script ##


if __name__ == '__main__':

    args = parse_args()
    app_id = f"org.libervia.{args.name}"
    package_file = Path(f"{app_id}.json")

    print(f"generating manifest for {app_id} ({args.version})")

    if package_file.exists() and not args.force:
        confirm = input(OVERWRITE_WARNING.format(package_file))
        if confirm != 'y':
            print("manifest building cancelled")
            sys.exit(0)

    tpl_file = Path(f"_tpl_{package_file}")

    if not tpl_file.exists():
        raise RuntimeError(f"Can't find template {tpl_file}, it is mandatory to build"
                           "the manifest!")

    with tpl_file.open() as f:
        template = json.load(f)

    build_settings = template.pop(SETTINGS_KEY, {})
    if PYTHON_VERSION_FORCE:
        build_settings.setdefault('python_deps_version_force', {}).update(PYTHON_VERSION_FORCE)
    if "setup_requirements" in build_settings:
        python_setup_requirements.extend(build_settings["setup_requirements"])
    main_package = canonical(build_settings['package'])
    if args.version == 'dev':
        # mercurial is needed for the dev version, both to install the package and to
        # retrieve the revision used
        python_setup_requirements.append("mercurial")
        if 'dev_repos' not in build_settings:
            raise NotImplementedError(
                "dev version can currently only be handled with a dev repostiory "
                "(dev_repos)")
        dev_repos = build_settings['dev_repos']
        pkg_repos[main_package] = dev_repos
        print_step("retrieving code from repository")
        main_package_source = cache_from_repos(main_package, dev_repos)
        dev_version_rev, dev_version_date = get_hg_id_date(main_package_source)
    else:
        main_package_source = main_package

    manifest = {}
    manifest['app-id'] = app_id
    if args.version == 'dev':
        manifest['default-branch'] = args.version
    # we update DEFAULT_MANIFEST only now, to keep "app-id" and "default-branch" at the
    # top of the manifest; also we don't want modules to be set yet
    default_modules = DEFAULT_MANIFEST.pop('modules', [])
    manifest.update(DEFAULT_MANIFEST)
    manifest.update(template)

    # modules.extend(get_libxslt())

    # setup requirements
    modules.extend(get_python_package(
        python_setup_requirements,
        step_name=f"setup_requirements__{app_id}",
        step_message="generating modules for setup requirements")
    )

    # python dependencies
    modules.extend(get_python_deps())

    # at this point we add things specified in the template
    modules.extend(manifest.get('modules', []))
    modules = default_modules + modules
    manifest['modules'] = modules

    # sat common things
    existing_modules = {d['name'] for d in modules if not isinstance(d, str)}
    if "sat_templates" not in existing_modules:
        modules.extend(get_python_package("sat_templates", with_pre=True))
    modules.extend(get_sat_media())
    conf_file = build_settings.get('conf_file')
    if conf_file:
        modules.extend(file_upload('libervia.conf', '/app', src=conf_file,
                                   replace={'##DBUS_PREFIX##': app_id},
                                   use_install=True))
    else:
        modules.extend(file_upload('libervia.conf', '/app'))

    # wrapper to launch the backend if needed
    wrapped_command = build_settings['wrapped_command']
    manifest['command'] = command = f'{wrapped_command}_wrapper'
    wrapper_template = build_settings.get("wrapper_template", "libervia_wrapper.py")
    modules.extend(file_upload(command, src=wrapper_template,
                               replace={'##COMMAND##': wrapped_command},
                               use_install=True))

    # icon
    modules.extend(get_icon())

    # desktop file and appdata file
    modules.extend(get_app_metadata())

    # now the app itself
    if args.version == 'dev':
        modules.append(get_repos_module(main_package, main_package_source))
    else:
        modules.extend(get_python_package(main_package, version=args.version))

    print_step("writing manifest")
    with package_file.open('w') as f:
        json.dump(manifest, f, indent=4)

    if args.ignore_cache and args.ignore_cache != ['all']:
        print("/!\\ those --ignore-cache arguments don't correspond to any step: {}".
              format(', '.join(args.ignore_cache)))

    print(f"manifest generated successfully at {package_file}")

    if args.export is not None:
        print_step("exporting building files")
        print(f"exporting to {args.export}\n")
        to_export = [
            ("package file", package_file),
        ]

        for m in modules:
            if isinstance(m, str):
                continue
            for s in m.get('sources', []):
                s_type = s.get('type')
                if s_type in ('path', 'file'):
                    try:
                        path = s['path']
                    except KeyError:
                        if 'url' in s:
                            continue
                        else:
                            raise ValueError(f"path missing for module source:\n{s}")
                    to_export.append((s_type, Path(path)))

        for label, path in to_export:
            print(f"exporting {label}: {path}")
            dest = args.export / path
            if dest.exists():
                if args.force_export:
                    print(f"    replacing existing {dest}")
                    if path.is_dir():
                        shutil.rmtree(dest)
                    else:
                        dest.unlink()
                else:
                    print("    it already exists, skipping")
                    continue
            if args.symlink == 'all':
                os.symlink(path, dest)
            else:
                if path.is_dir():
                    shutil.copytree(path, dest)
                else:
                    shutil.copyfile(path, dest)

        if args.version == 'dev':
            print("exporting cache")
            dest = args.export / get_cache_dir()
            if args.force_export and os.path.lexists(dest):
                print(f"    replacing existing {dest}")
                if dest.is_symlink():
                    dest.unlink()
                else:
                    shutil.rmtree(dest)

            if dest.exists():
                print("    it already exists, skipping")
            else:
                if args.symlink in ('all', 'cache'):
                    os.symlink(get_cache_dir().resolve(), dest)
                else:
                    shutil.copytree(get_cache_dir(), dest)

    print("\nexport completed")