# HG changeset patch # User Goffi # Date 1561211947 -7200 # Node ID 274af514a5cf1cccac62eec3e7a1294aeb5af649 # Parent 4c4c8ea4182ab0d525a3f6ce2ccb5b708f01a58a flatpak: reworked packages + made a building script: a new `build_manifest.py` script can now be used to generate flatpak manifests for every frontend of SàT. The manifest can be used both for development versions and stable ones. Templates files (in the form `_tmp_.json`) are used to set building instructions. A common runtime specific to SàT has been abandoned following a discussion on the official mailing list. A small wrapper is now used to launch backend automatically if it's not found. Desktop and app metadata have been added for Cagou. Jp and Primitivus don't have appdata and desktop files yet. diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/_tpl_org.salutatoi.Cagou.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/_tpl_org.salutatoi.Cagou.json Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,72 @@ +{ + "_build_settings": { + "wrapped_command": "cagou", + "dev_repos": {"type": "hg", "url": "https://repos.goffi.org/cagou"}, + "setup_requirements": ["cython"], + "python_deps_version_force": { + "urwid-satext": "pre-release", + "sat_tmp": "pre-release" + }, + "icon": "https://repos.goffi.org/sat_media/raw-file/tip/icons/muchoslava/svg/cagou_profil_bleu_avec_cou.svg", + "desktop_file": "org.salutatoi.Cagou.desktop", + "appdata_file": "org.salutatoi.Cagou.appdata.xml" + }, + "finish-args": [ + "--socket=session-bus", + "--socket=x11", + "--share=ipc", + "--socket=wayland", + "--share=network", + "--filesystem=home" + ], + "modules": [ + { + "name": "mtdev", + "sources": [ + { + "type": "archive", + "url": "http://bitmath.org/code/mtdev/mtdev-1.1.5.tar.bz2", + "sha256": "6677d5708a7948840de734d8b4675d5980d4561171c5a8e89e54adf7a13eba7f" + } + ] + }, + { + "name": "xsel", + "sources": [ + { + "type": "archive", + "url": "http://www.vergenet.net/~conrad/software/xsel/download/xsel-1.2.0.tar.gz", + "sha256": "b927ce08dc82f4c30140223959b90cf65e1076f000ce95e520419ec32f5b141c" + } + ] + }, + { + "name": "garden-contextmenu", + "buildsystem": "simple", + "build-commands": [ + "python2 -c 'import os, shutil, kivy; d=os.path.join(os.path.dirname(kivy.__file__), \"garden/contextmenu\"); shutil.copytree(\".\",d)'" + ], + "sources": [ + { + "type": "archive", + "url": "https://ftp.goffi.org/kivy_garden/contextmenu.zip", + "sha256": "34c81cdc0ae968537ae6ecfa3d9417d942c82ad012de5ca72fd9d0f5de57cac9" + } + ] + }, + { + "name": "garden-modernmenu", + "buildsystem": "simple", + "build-commands": [ + "python2 -c 'import os, shutil, kivy; d=os.path.join(os.path.dirname(kivy.__file__), \"garden/modernmenu\"); shutil.copytree(\".\",d)'" + ], + "sources": [ + { + "type": "archive", + "url": "https://ftp.goffi.org/kivy_garden/modernmenu.zip", + "sha256": "f9a6f8d4248050ed2d1be9b7ed67059e4bfdd5b52009606aaf643eb66f41b531" + } + ] + } + ] +} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/_tpl_org.salutatoi.Jp.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/_tpl_org.salutatoi.Jp.json Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,7 @@ +{ + "_build_settings": { + "package": "sat", + "wrapped_command": "jp", + "dev_repos": {"type": "hg", "url": "https://repos.goffi.org/sat"} + } +} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/_tpl_org.salutatoi.Primitivus.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/_tpl_org.salutatoi.Primitivus.json Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,7 @@ +{ + "_build_settings": { + "package": "sat", + 
"wrapped_command": "primitivus", + "dev_repos": {"type": "hg", "url": "https://repos.goffi.org/sat"} + } +} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/build_manifest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/build_manifest.py Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,983 @@ +#!/usr/bin/env python3 + +import tempfile +import subprocess +from pathlib import Path +from typing import List +from dataclasses import dataclass +import hashlib +from ftplib import FTP +from urllib.parse import urlparse +import sys +import os +import json +import time +import argparse +import shutil +from packaging.version import parse as parse_version +import requests + + +CACHE_LIMIT = 3600 * 24 +PYTHON_DEP_OVERRIDE = { + "lxml": { + "build-commands": [ + "python2 ./setup.py install --prefix=${FLATPAK_DEST} --optimize=1" + ], + "build-options": { + "env": { + "XSLT_CONFIG": "pkg-config libxslt" + } + }, + }, + "dbus-python": { + "build-commands": [ + "python2 setup.py build", + "python2 setup.py install --prefix=${FLATPAK_DEST}", + ] + }, + "urwid": { + "sources_extend": [ + { + "type": "patch", + "path": "main_loop.patch" + } + ] + }, + "kivy": { + "post-install": [ + # this file is not copied normally when installing with + # `python2 setup.py install`. + # TODO: report upstream + "cp kivy/setupconfig.py /app/lib/python2.7/site-packages/Kivy-*.egg/kivy/" + ] + }, +} +PYTHON_DEP_REQUIREMENTS_UPDATE = { + # service-identity is not seen as a twisted requirement, so it's sometimes misplaced + 'twisted': ['service-identity'], +} +PYTHON_SETUP_REQUIREMENTS = [ + 'setuptools', # to have an up-to-date version + 'setuptools_scm', + 'docutils', # needed my m2r + 'mistune', # needed my m2r + 'm2r', # needed by automat +] +DEFAULT_MANIFEST = { + "runtime": "org.freedesktop.Platform", + "runtime-version": "1.6", + "sdk": "org.freedesktop.Sdk", + "sdk-extensions": [ + "org.freedesktop.Sdk.Debug", + "org.freedesktop.Sdk.Locale", + "org.freedesktop.Sdk.Docs" + ], + "platform-extensions": [ + "org.freedesktop.Platform.Locale" + ], + "command": "sat_wrapper", + "finish-args": [ + "--socket=session-bus", + "--share=network", + "--filesystem=home" + ] +} +SHOW_REQUIRES_HEADER = 'Requires: ' +SETTINGS_KEY = '_build_settings' + + +@dataclass +class Package: + name: str + version: str + hash_: str + url: str + requirements: List[str] + + def __hash__(self): + return hash(self.name) + + +def print_step(step): + print() + print("┌" + "─" * (len(step) + 2) + "┐") + print("│ {} │".format(step)) + print("└" + "─" * (len(step) + 2) + "┘") + print() + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Flatpak manifest builder for Salut à Toi') + + build_group = parser.add_argument_group('building', 'options used to generate the manifest') + export_group = parser.add_argument_group('export', 'otions used to building files') + + # build group + build_group.add_argument('-f', '--force', action="store_true", + help="force overwritting of existing manifest") + build_group.add_argument('--ignore-cache', action='append', default=[], + help='ignore the cache of this step ("all" to ignore all caches)') + build_group.add_argument( + '--deps-dir', + help="use this directory to build_group python dependencies (it won't be deleted at " + "the end, useful when you want to re-use it and avoir re-downloading)") + build_group.add_argument('--no-binary', help="don't use binary packages") + + # export group + export_group.add_argument('-s', '--symlink', choices=['no', 'all', 'cache'], + default='no', + help='"no" to 
copy all files, "all" to symlink all files, "cache" to copy files ' + 'and symlink only cache (default: "no")') + export_group.add_argument('-F', '--force-export', action='store_true', + help='force overwritting of existing files/symlinks when exporting (DEFAULT: ' + 'existing files are skipped)') + export_group.add_argument('-e', '--export', type=Path, + help="export build files to this directory (DEFAULT: don't export files)") + + # common + parser.add_argument('name', type=str, help="name of the package to build_group") + parser.add_argument('version', type=str, help="version of the package to build_group") + + args = parser.parse_args() + + if 'all' in args.ignore_cache and len(args.ignore_cache) != 1: + parser.error('"all" cannot be used with step names in --ignore-cache, ' + 'use it alone') + + if args.export is not None and not args.export.is_dir(): + parser.error(f"{args.export} is not a directory!") + + return args + + +## useful methods ## + +def get_cache(name): + """Retrieve cache for a step + + cache will be ignored if too old, or explicitly ignored by user + @param name(str): name of the step + @return (object): cached data + """ + # name of the cache without prefix, mainly used for python_deps + shortname = name.split('__', 1)[0] + + if shortname in args.ignore_cache or 'all' in args.ignore_cache: + print(f"ignoring cache for {shortname}") + args.ignore_cache.remove(shortname) + return None + try: + cache_name = cache[name] + cache_updated = cache_name['updated'] + except KeyError: + return None + if time.time() - cache_updated > CACHE_LIMIT: + print(f"cache for {name} is too old, we won't use it") + return None + print(f"cache found for {shortname}") + return cache_name['data'] + + +def set_cache(name, data): + """Update cache for a step + + @param name(str): name of the step + @param data(object): data to cache + """ + cache_name = cache.setdefault(name, {}) + cache_name['updated'] = time.time() + cache_name['data'] = data + with cache_path.open('w') as f: + json.dump(cache, f) + + +def get_python_package(package_name, step_name=None, step_message=None, with_pre=False, + version=None): + """Generate module for a Python package + + @param package_name(str, list[str]): name of the Python package + use list of str to get several packages at once + @param step_name(None, str): name of the step, None to use package_name + @param step_message(None, str): message of the step (None to use default one) + use empty string to disable it + @param with_pre(bool): if True retrieve also pre-releases + @param version(str, None): indicate the version of the package to download + if None, the most recent version compativle with `with_pre` will be used + """ + single = isinstance(package_name, str) + if step_name is None: + step_name = package_name if single else ' - '.join(package_name) + if step_message != "": + print_step(step_message or f"retrieving latest version of {package_name}") + cache = get_cache(step_name) + if cache is not None: + return cache + package_names = [package_name] if single else package_name + data = [] + for name in package_names: + r = requests.get(f"https://pypi.org/pypi/{name}/json") + r.raise_for_status() + + releases_data = [] + for version_raw, release_data in r.json()["releases"].items(): + if version is not None and version != version_raw: + continue + release_version = parse_version(version_raw) + if release_version.is_prerelease and not with_pre: + continue + releases_data.append((release_version, release_data)) + + # we sort releases by version to be sure 
to have latests one + releases_data.sort(key=lambda d: d[0]) + + try: + release_version, release_data = releases_data[-1] + package = next(r for r in release_data if r["packagetype"] == "sdist") + except (IndexError, StopIteration): + raise RuntimeError(f"Can't find a matching package for {name}") + + print(f"{name} {release_version} will be used") + dep_data = { + "name": name, + "buildsystem": "simple", + "build-commands": [ + "python2 setup.py install --prefix=${FLATPAK_DEST}" + ], + "sources": [ + { + "type": "archive", + "url": package["url"], + "sha256": package["digests"]["sha256"], + } + ], + "ensure-writable": [ + "/lib/python2.7/site-packages/easy-install.pth", + "/lib/python2.7/site-packages/setuptools.pth", + ] + } + data.append(dep_data) + + set_cache(step_name, data) + return data + + +def file_upload(filename, dest="/app/bin", src=None, replace=None, use_install=False): + """Generate manifest modules to upload a local file + + @param filename(str): name of the local file to upload + Note that it will be overwritted if replace is used + @param dest(str, Path): path where the file must be installed in the container + @param src(str, None): when replace is used, name of the source file + @param replace(dict, None): mapping of text to replace + Use when file must be dynamicly adapted to the container, note that src must be + set to the name of the template file if this is used. + e.g. {'##COMMAND##', 'cagou'} will replace all '##COMMAND##' in filename + by 'cagou' + @param use_install(bool): if True, install file with `install src dest` + else, use `mv src dest` + @return (list(dict)): modules to add to the manifest + """ + assert isinstance(filename, str) and '/' not in filename + print_step(f"generating {filename} module") + # no cache for this one, to be sure to have always latest version + filename = Path(filename) + if src is None: + file_to_test = filename + else: + src = Path(src) + file_to_test = src + if not file_to_test.exists(): + raise RuntimeError( + f"{file_to_test} is missing, it is needed to build the manifest!") + + if replace: + if src is None: + raise ValueError( + '"src" must be set to name of template file if replace is used') + print(f'doing replacement in template "{src}" to dest "{filename}"') + # there are strings to replace, we read file, do the changes and dump the + # result in + with open(src) as f: + buff = f.read() + + for old, new in replace.items(): + buff = buff.replace(old, new) + + with open(filename, 'w') as f: + f.write(buff) + else: + if src is not None: + raise ValueError('"src" must not be used if replace is not used') + + with filename.open('rb') as f: + hash_ = hashlib.sha256(f.read()).hexdigest() + + dest = Path(dest) + + dep_data = { + "name": str(filename), + "buildsystem": "simple", + "build-commands": [ + ], + "sources": [ + { + "type": "file", + "path": str(filename), + "sha256": hash_, + } + ] + } + + build_commands = dep_data['build-commands'] + if use_install: + build_commands.append(f"install -Dv {filename} {dest}") + else: + if dest.as_posix() not in ('/app', '/app/bin'): + # just in case the destination directory doesn't exist + build_commands.append(f"mkdir -p {dest.parent}") + build_commands.append(f"mv -v {filename} {dest}") + + return [dep_data] + + +def file_from_url(url, dest=None, step_name=None, step_message=None): + """Generate manifest modules for a file either on a http(s) url or local + + @param url(str): url of the file to use, or local path + if it starts with http, url will be used, else file_upload(url) will 
be used + @param dest(str, None): path were the file should be copied + """ + parsed = urlparse(url) + if not parsed.scheme: + return file_upload(url) + if not parsed.scheme.startswith('http'): + raise NotImplementedError ( + f'we can\'t use the URL "{url}", the scheme "{parsed.scheme}" is not managed') + + filepath = Path(parsed.path) + stem = filepath.stem + + if step_name is None: + # we use this name to easily ignore cache (with stem) while avoiding + # conflict if we have 2 URLs with the same stem + step_name = f"{stem}__{url}" + + if step_message is None: + step_messate = f"generating module for {stem}" + + print_step(step_message) + cache = get_cache(step_name) + if cache is not None: + return cache + + r = requests.get(url) + r.raise_for_status() + file_hash = hashlib.sha256(r.content).hexdigest() + + dep_data = {"name": stem} + + if dest is not None: + dest = Path(dest) + dep_data.update({ + 'buildsystem': 'simple', + 'build-commands':[ + f'mkdir -p {dest.parent}', + f'mv "{filepath.name}" "{dest}"', + ] + }) + + dep_data['sources'] = [ + { + "type": "file", + "url": url, + "sha256": file_hash, + } + ] + + data = [dep_data] + set_cache(step_name, data) + return data + + +def get_requirements(pip, package_name): + """Retrieve requirements for an installed python package + + @param pip(Path): path to pip executable to use + package must be installed in the environment of this pip + @param package_name(str): name of the package to retrieve + @return(list[str]): found requirements + """ + show_cplted = subprocess.run([pip, "show", package_name], + capture_output=True, text=True) + show_cplted.check_returncode() + + lines = show_cplted.stdout.split('\n') + requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER)) + requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):] + requirements = [canonical(p) for p in requirement_raw.split(',') if p.strip()] + requirements_update = [ + canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])] + new_requirements = set(requirements_update).difference(requirements) + if new_requirements: + print("adding extra requirements to {}: {}".format( + package_name, ', '.join(new_requirements))) + requirements.extend(new_requirements) + return requirements + + +def resolve_requirements(package, deps_map, deps, indent=0): + """Recursively resolve requirements + + @param package(Package): retrieve dependencies of this package + @param deps_map(dict): map from dependency name to Package instance + @param deps(list[package]): ordered dependencies + this list is updated in place + @param indent(int): use internally to print dependencies tree + """ + if package in deps: + return + print(" " * indent + package.name) + reqs_data = [deps_map[r] for r in package.requirements] + + for data in reqs_data: + resolve_requirements(data, deps_map, deps, indent+1) + + deps.append(package) + + +def get_cache_dir(): + """Return path to directory to use for cache""" + return Path(f"cache_{app_id}") + + +def canonical(name): + """Get canonical name of a package""" + return name.lower().replace('_', '-').strip() + + +modules = [] +cache_path = Path.home() / Path('.cache/sat_flatpak_cache.json') +if not cache_path.exists(): + cache = {} +else: + with cache_path.open() as f: + cache = json.load(f) + + +## steps ## + +def get_libxslt(): + """Generate manifest module to install libxslt (needed for lxml)""" + step_name = 'libxslt' + print_step("retrieving latest version of libxslt") + cache = get_cache(step_name) + if cache is not None: + 
return cache + + ftp = FTP("xmlsoft.org") + ftp.login() + ftp.cwd("libxml2") + libxslt_archives = [l for l in ftp.nlst() if 'xslt' in l + and l.endswith('tar.gz') + and 'git' not in l + and 'rc' not in l] + latest_libxslt = libxslt_archives[-1] + print(f"latest libxslt found: {latest_libxslt}") + + with tempfile.TemporaryDirectory() as tmp_dirname: + tmp_dir = Path(tmp_dirname) + file_path = tmp_dir / latest_libxslt + with file_path.open('wb+') as f: + ftp.retrbinary('RETR ' + latest_libxslt, f.write) + f.seek(0) + libxslt_hash = hashlib.sha256(f.read()).hexdigest() + + ftp.quit() + + print(f"libxstl hash: {libxslt_hash}") + + data = [{ + "name": "libxslt", + "sources": [ + { + "type": "archive", + "url": f"ftp://xmlsoft.org/libxml2/{latest_libxslt}", + "sha256": libxslt_hash, + } + ] + }] + set_cache(step_name, data) + return data + + +def get_python_deps(): + """Generate manifest modules for python dependencies of main package""" + step_name = f'python_deps__{app_id}' + print_step("retrieving python dependencies") + cache = get_cache(step_name) + if cache is not None: + return cache + + with tempfile.TemporaryDirectory() as tmp_dirname: + if args.deps_dir is not None: + # we ignore the created temporary directory is we have an other one specified + tmp_dirname = args.deps_dir + tmp_dir = Path(tmp_dirname) + env_dir = tmp_dir / 'env' + pip = env_dir / 'bin' / 'pip' + download_dir = tmp_dir / 'archives' + if download_dir.exists() and env_dir.exists(): + print("dependencies are already downloaded and packages are already " + "installed") + else: + download_dir.mkdir() + print(f"working in temporary directory {tmp_dirname}") + venv_cplted = subprocess.run(["/usr/bin/env", "virtualenv2", env_dir]) + venv_cplted.check_returncode() + print("\ndownloading packages") + command_args = [pip, "download", "-d", download_dir, + "--progress-bar", "emoji"] + if args.no_binary: + command_args.extend(["--no-binary", ":all:"]) + command_args.append(main_package_source) + down_cplted = subprocess.run(command_args) + down_cplted.check_returncode() + print("\ninstalling package\n") + # we install package to have a good map of requirements, pypi metadata + # are incomplete. 
Packages should be retrieved from pip cache + inst_cplted = subprocess.run([pip, "install", main_package_source]) + inst_cplted.check_returncode() + + print_step("analyzing python dependencies") + deps_map = {} + + for archive_path in download_dir.iterdir(): + name, right_part = archive_path.name.rsplit('-', 1) + name_canonical = canonical(name) + if right_part.endswith('.tar.gz'): + version = right_part[:-7] + elif right_part.endswith('.tar.bz2'): + version = right_part[:-8] + elif right_part.endswith('.zip'): + version = right_part[:-4] + else: + raise ValueError( + f"Archive are supposed to be .tar.gz, .tar.bz2 or .zip archives, but " + f"file found is {archive_path.name}, did something change on Pypy?") + with open(archive_path, "rb") as f: + dep_hash = hashlib.sha256(f.read()).hexdigest() + r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json") + r.raise_for_status() + dep_json = r.json() + release_json = dep_json["releases"][version] + try: + version_json = next(v for v in release_json + if v['digests']['sha256'] == dep_hash) + except IndexError: + raise ValueError(f"Can't find the version we downloaded for {name}") + + requirements = get_requirements(pip, name_canonical) + + dep = Package(name=name_canonical, + version=version, + hash_=dep_hash, + url=version_json['url'], + requirements=requirements, + # extra_requirements=extra_requirements, + ) + + deps_map[name_canonical] = dep + print(f"found dependency: {dep.name} {dep.version}") + + print_step("ordering dependencies") + + requirements = get_requirements(pip, main_package) + main_pack = Package(name=main_package, + version=args.version, + hash_="", + url="", + requirements=requirements, + ) + + deps = [] + print("resolving requirements\n") + resolve_requirements(main_pack, deps_map, deps) + missing_deps = set(deps_map.values()).difference(deps) + if missing_deps: + print("\n/!\\ some dependencies are missing this should not happen! 
" + "Adding them\n") + print("additional requirements") + for pack in missing_deps: + resolve_requirements(pack, deps_map, deps) + + # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS + for name in PYTHON_SETUP_REQUIREMENTS: + package_data = deps_map.get(name) + if package_data is not None: + deps.remove(package_data) + print(f"removed {name} which is already a setup requirement") + + # main package is installed at the end + deps.remove(main_pack) + + print("\npackages are now ordered: {}".format(", ".join(d.name for d in deps))) + + print("\nwe now generate modules for python dependencies") + data = [] + version_force = { + canonical(n): v + for n,v in build_settings.get('python_deps_version_force', {}).items() + } + + for dep in deps: + version_forced = version_force.get(dep.name) + if version_forced == 'pre-release': + print(f"using pre-release version for {dep.name} as requested in build " + "settings") + dep_data = get_python_package(dep.name, step_message='', with_pre=True)[0] + elif version_forced is not None: + print(f"using version {version_forced} for {dep.name} as requested in " + "build settings") + dep_data = get_python_package(dep.name, step_message='', + version=version_forced)[0] + else: + dep_data = { + "name": dep.name, + "buildsystem": "simple", + "build-commands": [ + "python2 setup.py install --prefix=${FLATPAK_DEST}" + ], + "sources": [ + { + "type": "archive", + "url": dep.url, + "sha256": dep.hash_, + } + ], + "ensure-writable": [ + "/lib/python2.7/site-packages/easy-install.pth", + "/lib/python2.7/site-packages/setuptools.pth", + ] + } + + if dep.name in PYTHON_DEP_OVERRIDE: + print(f"applying override for {dep.name}") + override = PYTHON_DEP_OVERRIDE[dep.name] + + # key suffixed with "_extend" won't replace the data + for key in list(override.keys()): + if key.endswith('_extend'): + real_key = key[:-7] + extend_data = override.pop(key) + if real_key == 'sources': + for item_data in extend_data: + # we look for missing hashes and add them + if (item_data.get('type') in ('file', 'patch') + and 'sha256' not in item_data): + with open(item_data['path'], 'rb') as f: + hash_ = hashlib.sha256(f.read()).hexdigest() + item_data['sha256'] = hash_ + + dep_data.setdefault(real_key, []).extend(extend_data) + + dep_data.update(override) + + data.append(dep_data) + + set_cache(step_name, data) + return data + + +def cache_from_repos(): + """Get and cache locally a repository and returns cache path""" + print_step("retrieving code from repository") + dev_repos = build_settings['dev_repos'] + repos_type = dev_repos.get('type', 'hg') + if repos_type != 'hg': + raise NotImplementedError("only Mercurial is managed so far") + url = dev_repos['url'] + dest_path = get_cache_dir() / main_package + dest_path.mkdir(parents=True, exist_ok=True) + if (dest_path / '.hg').is_dir(): + print("code is already in cache") + else: + try: + print(f"retrieving code from repository {url} into {dest_path}") + hg_cplted = subprocess.run(["hg", "clone", url, dest_path]) + hg_cplted.check_returncode() + except Exception as e: + shutil.rmtree(dest_path) + raise e + return dest_path + + +def get_repos_module(): + """Generate manifest module for the repository""" + dep_data = { + "name": main_package, + "buildsystem": "simple", + "build-commands": [ + "python2 setup.py install --prefix=${FLATPAK_DEST}" + ], + "sources": [ + { + "type": "dir", + "path": str(main_package_source) + } + ], + "ensure-writable": [ + "/lib/python2.7/site-packages/easy-install.pth", + 
"/lib/python2.7/site-packages/setuptools.pth", + ] + } + return [dep_data] + + +def get_sat_media(): + """Generate module for last version of sat_media available on the FTP""" + step_name = 'sat_media' + print_step("retrieving latest version of sat_media") + cache = get_cache(step_name) + if cache is not None: + return cache + url = "https://ftp.goffi.org/sat_media/sat_media.tar.bz2" + r = requests.get(url) + r.raise_for_status() + hash_ = hashlib.sha256(r.content).hexdigest() + dep_data = { + "name": "sat-media", + "buildsystem": "simple", + "build-commands": [ + "cp -vr . ${FLATPAK_DEST}/share/sat-media" + ], + "sources": [ + { + "type": "archive", + "url": url, + "sha256": hash_, + } + ], + } + + data = [dep_data] + set_cache(step_name, data) + return data + + +def get_icon(): + icon = build_settings.get('icon') + if icon is not None: + if isinstance(icon, str): + icon = {'url': icon} + icon_path = Path(urlparse(icon['url']).path) + suffix = icon_path.suffix[1:] + if suffix not in ('svg', 'png'): + raise ValueError("invalid icon, you must use a SVG or PNG image!") + if 'size' not in icon: + if suffix == 'svg': + icon['size'] = 'scalable' + else: + raise ValueError('icon size is not specified, please add a "size" key') + + dest_path = f"/app/share/icons/hicolor/{icon['size']}/apps/{app_id}.{suffix}" + + return file_from_url( + url = icon['url'], + dest = dest_path, + # we have common cache if several manifest use the same icon URL + step_name = f"icon__{icon}", + step_message = "retrieving application icon", + ) + + +def get_app_metadata(): + desktop_file = build_settings.get('desktop_file') + appdata_file = build_settings.get('appdata_file') + if desktop_file is None and app_data_file is None: + return + + print_step("retrieving application metadata") + # we don't use cache here to be sure to have always up-to-date files + + data = [] + + if desktop_file is not None: + print("generating module for desktop metadata") + data.extend(file_upload( + filename = desktop_file, + dest=f"/app/share/applications/{app_id}.desktop", + )) + + if appdata_file is not None: + print("generating module for appdata metadata") + data.extend(file_upload( + filename = appdata_file, + dest=f"/app/share/metainfo/{app_id}.appdata.xml", + )) + + return data + + +## main_script ## + + +if __name__ == '__main__': + + args = parse_args() + title = args.name.title() + app_id = f"org.salutatoi.{title}" + package_file = Path(f"{app_id}.json") + + print(f"generating manifest for {app_id} ({args.version})") + + if package_file.exists() and not args.force: + confirm = input( + f"{package_file} already exists, do you want to overwritte it (y/N)? 
") + if confirm != 'y': + print("manifest building cancelled") + sys.exit(0) + + tpl_file = Path(f"_tpl_{package_file}") + + if not tpl_file.exists(): + raise RuntimeError(f"Can't find template {tpl_file}, it is mandatory to build" + "the manifest!") + + with tpl_file.open() as f: + template = json.load(f) + + build_settings = template.pop(SETTINGS_KEY, {}) + if "setup_requirements" in build_settings: + PYTHON_SETUP_REQUIREMENTS.extend(build_settings["setup_requirements"]) + main_package = canonical(build_settings.get('package', args.name)) + if args.version == 'dev' and 'dev_repos' in build_settings: + dev_repos = build_settings['dev_repos'] + main_package_source = cache_from_repos() + else: + main_package_source = main_package + + manifest = {} + manifest['app-id'] = app_id + manifest['default-branch'] = args.version + # we update DEFAULT_MANIFEST only now to have "app-id" and "default-branch" on the top + # of the manifest + manifest.update(DEFAULT_MANIFEST) + manifest.update(template) + + modules.extend(get_libxslt()) + + # setup requirements + modules.extend(get_python_package( + PYTHON_SETUP_REQUIREMENTS, + step_name="setup_requirements", + step_message="generating modules for setup requirements") + ) + + # python dependencies + modules.extend(get_python_deps()) + + # at this point we add things specified in the template + modules.extend(manifest.get('modules', [])) + manifest['modules'] = modules + + # sat common things + existing_modules = {d['name'] for d in modules} + if "sat_templates" not in existing_modules: + modules.extend(get_python_package("sat_templates", with_pre=True)) + modules.extend(get_sat_media()) + modules.extend(file_upload('sat.conf', '/app')) + + # wrapper to launch the backend if needed + wrapped_command = build_settings.get('wrapped_command') + if wrapped_command: + modules.extend(file_upload('sat_wrapper', src='sat_wrapper.py', + replace={'##COMMAND##': wrapped_command}, + use_install=True)) + + # icon + modules.extend(get_icon()) + + # desktop file and appdata file + modules.extend(get_app_metadata()) + + # now the app itself + if args.version == 'dev' and 'dev_repos' in build_settings: + # mercurial is needed for dev version to install but also to + # retrieve revision used + modules.extend(get_python_package("mercurial")) + modules.extend(get_repos_module()) + else: + modules.extend(get_python_package(main_package, version=args.version)) + + print_step("writing manifest") + with package_file.open('w') as f: + json.dump(manifest, f, indent=4) + + if args.ignore_cache: + print("/!\\ those --ignore-cache arguments don't correspond to any step: {}". 
+ format(', '.join(args.ignore_cache))) + + print(f"manifest generated successfully at {package_file}") + + if args.export is not None: + print_step("exporting building files") + print(f"exporting to {args.export}\n") + to_export = [ + ("package file", package_file), + ] + + for m in modules: + for s in m.get('sources', []): + s_type = s.get('type') + if s_type in ('path', 'file'): + try: + path = s['path'] + except KeyError: + if 'url' in s: + continue + else: + raise ValueError(f"path missing for module source:\n{s}") + to_export.append((s_type, Path(path))) + + for label, path in to_export: + print(f"exporting {label}: {path}") + dest = args.export / path + if dest.exists(): + if args.force_export: + print(f" replacing existing {dest}") + if path.is_dir(): + shutil.rmtree(dest) + else: + dest.unlink() + else: + print(" it already exists, skipping") + continue + if args.symlink == 'all': + os.symlink(path, dest) + else: + if path.is_dir(): + shutil.copytree(path, dest) + else: + shutil.copyfile(path, dest) + + if args.version == 'dev': + print("exporting cache") + dest = args.export / get_cache_dir() + if args.force_export and os.path.lexists(dest): + print(f" replacing existing {dest}") + if dest.is_symlink(): + dest.unlink() + else: + shutil.rmtree(dest) + + if dest.exists(): + print(" it already exists, skipping") + else: + if args.symlink in ('all', 'cache'): + os.symlink(get_cache_dir().resolve(), dest) + else: + shutil.copytree(get_cache_dir(), dest) + + print("\nexport completed") diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/org.salutatoi.Cagou.appdata.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/org.salutatoi.Cagou.appdata.xml Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,50 @@ + + + + org.salutatoi.Cagou + FSFAP + AGPL-3.0-or-later + Cagou (SàT) + Multi-purpose communication ecosystem (desktop/mobile frontend) - XMPP standard + + Network + Chat + FileTransfer + + +

+    <description>
+        <p>
+            Salut à Toi is a multi-purpose communication ecosystem.
+        </p>
+        <p>
+            It offers many tools to communicate and collaborate (instant messaging, blogging, file sharing, photo albums, forums, events, etc.).
+            It is multi-purpose, but also multi-frontend (you can use it on desktop, on mobile devices, in a web browser, or in a terminal) and multi-platform.
+        </p>
+        <p>
+            Salut à Toi (SàT) is made with a strong sense of ethics (see its social contract), is decentralized (you can run your own service and keep your important data yourself while still communicating with the rest of the network), and uses a standard communication protocol (XMPP), making it compatible with many other applications.
+        </p>
+        <p>
+            This part is the desktop/mobile frontend.
+        </p>
+    </description>
+    <launchable type="desktop-id">org.salutatoi.Cagou.desktop</launchable>
+    <screenshots>
+        <screenshot type="default">
+            <caption>A chat conversation seen with Cagou on Android</caption>
+            <image>https://repos.goffi.org/sat_docs/raw-file/tip/screenshots/0.7/cagou_0.7.0b1_android.jpg</image>
+        </screenshot>
+        <screenshot>
+            <caption>A chat conversation seen with Libervia frontend</caption>
+            <image>https://repos.goffi.org/sat_docs/raw-file/tip/screenshots/0.7/libervia_pages_chat.png</image>
+        </screenshot>
+        <screenshot>
+            <caption>A photos album seen with Libervia frontend</caption>
+            <image>https://repos.goffi.org/sat_docs/raw-file/tip/screenshots/0.7/libervia_pages_photos_album.png</image>
+        </screenshot>
+    </screenshots>
+    <url type="homepage">https://salut-a-toi.org</url>
+    <url type="bugtracker">https://bugs.goffi.org</url>
+    <project_group>SAT</project_group>
+    <provides>
+        <binary>cagou</binary>
+    </provides>
+</component>
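For illustration only (not part of the changeset): the new appdata file above and the desktop file added just below can be sanity-checked locally before building, assuming `appstream-util` (appstream-glib) and `desktop-file-validate` (desktop-file-utils) are installed on the host. A minimal sketch:

#!/usr/bin/env python3
# Optional local check of the new Cagou metadata files; both external
# tools are assumptions (host-installed), not part of this patch.
import subprocess

for cmd in (
    ["appstream-util", "validate", "org.salutatoi.Cagou.appdata.xml"],
    ["desktop-file-validate", "org.salutatoi.Cagou.desktop"],
):
    subprocess.check_call(cmd)  # raises CalledProcessError if validation fails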
diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/org.salutatoi.Cagou.desktop --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/org.salutatoi.Cagou.desktop Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,8 @@ +[Desktop Entry] +Type=Application +Name=Cagou (SàT) +Comment=desktop/mobile frontend for Salut à Toi (XMPP) +Exec=sat_wrapper +Icon=org.salutatoi.Cagou +Categories=Network;InstantMessaging;Chat;FileTransfer;RemoteAccess; +Keywords=xmpp;chat;messaging;im;file;sharing;communication;jabber;social;network;decentralization;desktop; diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/org.salutatoi.Cagou.json --- a/flatpak/org.salutatoi.Cagou.json Mon May 06 09:06:33 2019 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,221 +0,0 @@ -{ - "app-id": "org.salutatoi.Cagou", - "branch": "0.7.0a2", - "runtime": "org.salutatoi.Platform", - "runtime-version": "0.7.0a2", - "sdk": "org.salutatoi.Sdk", - "command": "cagou", - "finish-args": [ - "--socket=session-bus", - "--socket=x11", - "--share=ipc", - "--socket=wayland", - "--share=network", - "--filesystem=home" - ], - "modules": [ - { - "name": "mtdev", - "sources": [ - { - "type": "archive", - "url": "http://bitmath.org/code/mtdev/mtdev-1.1.5.tar.bz2", - "sha256": "6677d5708a7948840de734d8b4675d5980d4561171c5a8e89e54adf7a13eba7f" - } - ] - }, - { - "name": "xsel", - "sources": [ - { - "type": "archive", - "url": "http://www.vergenet.net/~conrad/software/xsel/download/xsel-1.2.0.tar.gz", - "sha256": "b927ce08dc82f4c30140223959b90cf65e1076f000ce95e520419ec32f5b141c" - } - ] - }, - { - "name": "python2-cython", - "buildsystem": "simple", - "build-commands": [ - "python ./setup.py install --prefix=${FLATPAK_DEST}" - ], - "cleanup": [ - "/bin" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/d2/12/8ef44cede251b93322e8503fd6e1b25a0249fa498bebec191a5a06adbe51/Cython-0.28.4.tar.gz", - "sha256": "76ac2b08d3d956d77b574bb43cbf1d37bd58b9d50c04ba281303e695854ebc46" - } - ] - }, - { - "name": "python2-certifi", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/4d/9c/46e950a6f4d6b4be571ddcae21e7bc846fcbb88f1de3eff0f6dd0a6be55d/certifi-2018.4.16.tar.gz", - "sha256": "13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-urllib3", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz", - "sha256": "a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-chardet", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz", - "sha256": "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-requests", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install 
--prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/54/1f/782a5734931ddf2e1494e4cd615a51ff98e1879cbe9eecbdfeaf09aa75e9/requests-2.19.1.tar.gz", - "sha256": "ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-kivy-garden", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/7d/68/decaee596ff8168a39432eb3949fc7c0be952ebb9467806823bffc165d48/kivy-garden-0.1.4.tar.gz", - "sha256": "c256f42788421273a08fbb0a228f0fb0e80dd86b629fb8c0920507f645be6c72" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-Kivy", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/7d/8b/89d220b7f96dc2662b81319067f679b4cd73cda66f4aa850db5b6c6cfc7a/Kivy-1.10.1.tar.gz", - "sha256": "7ce9e88b75de47a3f1d52cbe6924c18cafc83fa102e54f6794d241746e93fdff" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "python2-plyer", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/c4/19/f7cf7658e0f671294d8675a0bcc5d76883e8e738cc1e78dd097e2ea5fa72/plyer-1.3.0.tar.gz", - "sha256": "bdc4d09d5c1c236025a32e4fb48a83a0caa5c48667b35de93db25c8f916ec750" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - }, - { - "name": "garden-contextmenu", - "buildsystem": "simple", - "build-commands": [ - "mkdir -p /app/lib/python2.7/site-packages/kivy/garden/contextmenu/", - "cp -av . /app/lib/python2.7/site-packages/kivy/garden/contextmenu/" - ], - "sources": [ - { - "type": "archive", - "url": "https://ftp.goffi.org/kivy_garden/contextmenu.zip", - "sha256": "2a6f9d631ba7f6b7f0a96cb6e9aa2483812b7a2fd5731e188d99c1fcce2bd620" - } - ] - }, - { - "name": "garden-modernmenu", - "buildsystem": "simple", - "build-commands": [ - "mkdir -p /app/lib/python2.7/site-packages/kivy/garden/modernmenu/", - "cp -av . 
/app/lib/python2.7/site-packages/kivy/garden/modernmenu/" - ], - "sources": [ - { - "type": "archive", - "url": "https://ftp.goffi.org/kivy_garden/modernmenu.zip", - "sha256": "283d3c143c7ac0b4fb1d555ae7d190a6b1348843213d4b514beb55f2e275a1b2" - } - ] - }, - { - "name": "cagou", - "buildsystem": "simple", - "build-commands": [ - "python2 setup.py install --prefix=/app" - ], - "sources": [ - { - "type": "archive", - "url": "https://files.pythonhosted.org/packages/81/e8/8c967847465dc8f192bd97638ca696ca8990c7ad379ffd33b90fd334136d/cagou-0.7.0a1.tar.gz", - "sha256": "ecf3b9079758b0f27cb903b6d0564e1115cae873c2ae1dddbe888787ffcc557c" - } - ], - "ensure-writable": [ - "/lib/python2.7/site-packages/easy-install.pth" - ] - } - ] -} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/org.salutatoi.Jp.json --- a/flatpak/org.salutatoi.Jp.json Mon May 06 09:06:33 2019 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -{ - "app-id": "org.salutatoi.Jp", - "branch": "0.7.0a2", - "runtime": "org.salutatoi.Platform", - "runtime-version": "0.7.0a2", - "sdk": "org.salutatoi.Sdk", - "command": "jp", - "finish-args": [ - "--socket=session-bus", - "--share=network", - "--filesystem=home" - ], - "modules": [ - { - "name": "jp", - "buildsystem": "simple", - "build-commands": [ - "install -D /usr/bin/jp /app/bin/jp" - ] - } - ] -} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/org.salutatoi.Primitivus.json --- a/flatpak/org.salutatoi.Primitivus.json Mon May 06 09:06:33 2019 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -{ - "app-id": "org.salutatoi.Primitivus", - "branch": "0.7.0a2", - "runtime": "org.salutatoi.Platform", - "runtime-version": "0.7.0a2", - "sdk": "org.salutatoi.Sdk", - "command": "primitivus", - "finish-args": [ - "--socket=session-bus" - ], - "modules": [ - { - "name": "primitivus", - "buildsystem": "simple", - "build-commands": [ - "install -D /usr/bin/primitivus /app/bin/primitivus" - ] - } - ] -} diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/sat.conf --- a/flatpak/sat.conf Mon May 06 09:06:33 2019 +0200 +++ b/flatpak/sat.conf Sat Jun 22 15:59:07 2019 +0200 @@ -1,5 +1,5 @@ [DEFAULT] -media_dir = /usr/share/sat-media +media_dir = /app/share/sat-media pid_dir = /tmp [libervia] diff -r 4c4c8ea4182a -r 274af514a5cf flatpak/sat_wrapper.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flatpak/sat_wrapper.py Sat Jun 22 15:59:07 2019 +0200 @@ -0,0 +1,26 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- +"""This script launch SàT backend if it's not before running command""" + +import os +import sys +import dbus +import subprocess + +command = "##COMMAND##" +const_INT_PREFIX = "org.salutatoi.SAT" +const_OBJ_PATH = '/org/salutatoi/SAT/bridge' + + +try: + sessions_bus = dbus.SessionBus() + db_object = sessions_bus.get_object(const_INT_PREFIX, + const_OBJ_PATH) +except dbus.exceptions.DBusException as e: + if e._dbus_error_name != 'org.freedesktop.DBus.Error.ServiceUnknown': + raise e + # backend not found, we need to launch it + print("Launching SàT backend") + subprocess.check_call("sat") + +os.execlp(command, command, *sys.argv[1:])
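For illustration only (not part of the changeset): a minimal sketch of the template handling performed by build_manifest.py, using Cagou and the `dev` branch as an example. It only shows how the `_build_settings` section is stripped from a `_tpl_*.json` template and how the remaining keys are merged over the defaults; the real script additionally resolves the Python dependencies, icon, metadata files and the application module itself, and the defaults shown are just a subset of DEFAULT_MANIFEST.

#!/usr/bin/env python3
# Simplified view of `build_manifest.py cagou dev` (sketch only).
import json
from pathlib import Path

app_id = "org.salutatoi.Cagou"                        # derived from the "name" argument
template = json.loads(Path(f"_tpl_{app_id}.json").read_text())
build_settings = template.pop("_build_settings", {})  # script-only options (repos, wrapper,
                                                      # icon...), never written to the manifest

manifest = {"app-id": app_id, "default-branch": "dev"}  # "dev" or a release version
manifest.update({                                     # subset of DEFAULT_MANIFEST
    "runtime": "org.freedesktop.Platform",
    "runtime-version": "1.6",
    "sdk": "org.freedesktop.Sdk",
    "command": "sat_wrapper",
})
manifest.update(template)                             # template keys override the defaults

Path(f"{app_id}.json").write_text(json.dumps(manifest, indent=4))

The resulting org.salutatoi.Cagou.json is an ordinary manifest that flatpak-builder can consume.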