# HG changeset patch
# User Goffi
# Date 1566571265 -7200
# Node ID 4fe61dcd856ef71bf915d4e00ee613b39f6aa451
# Parent  4a5f9daa0ce02e2a27bf823c0ffd6f86d3d05bc9
flatpak: update build-manifest to retrieve dependencies without initial download phase:

- dependencies are now entirely retrieved from the installation, and requirements are
  built from there. The source package is currently always used, and it is now possible
  to use PYTHON_DEP_PREINSTALL to add extra packages or use a specific version of a
  package.
- fixed insertion of `release_elt` for dev version at the right location.
- `default-branch` is not set anymore for stable versions.
- libxslt is not added anymore as it is part of the freedesktop runtime.
- flathub shared-modules is now used for Python 2.

diff -r 4a5f9daa0ce0 -r 4fe61dcd856e flatpak/build_manifest.py
--- a/flatpak/build_manifest.py	Fri Aug 23 16:28:36 2019 +0200
+++ b/flatpak/build_manifest.py	Fri Aug 23 16:41:05 2019 +0200
@@ -58,6 +58,8 @@
         }
     },
 }
+# FIXME: Q&D way to install older version of twisted for Flatpak
+PYTHON_DEP_PREINSTALL = ['twisted<19.7.0']
 PYTHON_DEP_REQUIREMENTS_UPDATE = {
     # service-identity is not seen as a twisted requirement, so it's sometimes misplaced
     'twisted': ['service-identity'],
@@ -80,35 +82,11 @@
         "--filesystem=home"
     ],
     "modules": [
-        {
-            "name": "python",
-            # we use the same options as the ones of Python 2 from SDK
-            "config-opts": [
-                "--enable-deterministic-archives",
-                "--disable-static",
-                "--enable-shared",
-                "--with-ensurepip=yes",
-                "--with-system-expat",
-                "--with-system-ffi",
-                "--enable-loadable-sqlite-extensions",
-                "--with-dbmliborder=gdbm",
-                "--enable-unicode=ucs4",
-            ],
-            "post-install": [
-                # stipping won't work without this
-                "chmod 644 /app/lib/libpython2.7.so.1.0"
-            ],
-            "sources": [
-                {
-                    "type": "archive",
-                    "url": "https://www.python.org/ftp/python/2.7.16/Python-2.7.16.tar.xz",
-                    "sha256": "f222ef602647eecb6853681156d32de4450a2c39f4de93bd5b20235f2e660ed7"
-                }
-            ]
-        }
+        "shared-modules/python2.7/python-2.7.json",
     ]
 }
 SHOW_REQUIRES_HEADER = 'Requires: '
+SHOW_VERSION_HEADER = 'Version: '
 SETTINGS_KEY = '_build_settings'
 APPDATA_RELEASE_DEV_TEXT = dedent("""\
     This is a development version, used as a preview.
@@ -128,6 +106,9 @@
     def __hash__(self):
         return hash(self.name)
 
+    def __eq__(self, other):
+        return self.name == other.name
+
 
 def print_step(step):
     print()
@@ -450,52 +431,56 @@
     return data
 
 
-def get_requirements(pip, package_name):
+def get_requirements(pip, package_name, done=None, requirements=None, tree=None, indent=0):
     """Retrieve requirements for an installed python package
 
     @param pip(Path): path to pip executable to use
         package must be installed in the environment of this pip
    @param package_name(str): name of the package to retrieve
-    @return(list[str]): found requirements
+    @return(list[(str,str)]): list of ordered requirements (package_name, version)
     """
+    print(f"found dependency: {package_name}")
+    if requirements is None:
+        assert done is None
+        assert tree is None
+        if not package_name == canonical(package_name):
+            raise ValueError("main package name is not canonical")
+        tree = []
+        requirements = []
+        done = []
+        main = True
+    else:
+        main = False
+    done.append(package_name)
     show_cplted = subprocess.run([pip, "show", package_name],
                                  capture_output=True, text=True)
     show_cplted.check_returncode()
     lines = show_cplted.stdout.split('\n')
+    version_raw = next(l for l in lines if l.startswith(SHOW_VERSION_HEADER))
+    version = version_raw[len(SHOW_VERSION_HEADER):]
     requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER))
     requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):]
-    requirements = [canonical(p) for p in requirement_raw.split(',') if p.strip()]
+    requirements_canon = [canonical(p) for p in requirement_raw.split(',') if p.strip()]
     requirements_update = [
         canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])]
-    new_requirements = set(requirements_update).difference(requirements)
+    new_requirements = set(requirements_update).difference(requirements_canon)
     if new_requirements:
         print("adding extra requirements to {}: {}".format(
             package_name, ', '.join(new_requirements)))
-        requirements.extend(new_requirements)
+        requirements_canon.extend(new_requirements)
+    tree.append(f"{' '*indent}{package_name} ({version})")
+    for r in requirements_canon:
+        if r not in done:
+            get_requirements(pip, r, done, requirements, tree, indent+1)
+    req_tuple = (package_name, version, requirements_canon)
+    requirements.append(req_tuple)
+    if main:
+        print("\ndependency tree:\n")
+        print('\n'.join(tree))
     return requirements
 
 
-def resolve_requirements(package, deps_map, deps, indent=0):
-    """Recursively resolve requirements
-
-    @param package(Package): retrieve dependencies of this package
-    @param deps_map(dict): map from dependency name to Package instance
-    @param deps(list[package]): ordered dependencies
-        this list is updated in place
-    @param indent(int): use internally to print dependencies tree
-    """
-    if package in deps:
-        return
-    print(" " * indent + package.name)
-    reqs_data = [deps_map[r] for r in package.requirements]
-
-    for data in reqs_data:
-        resolve_requirements(data, deps_map, deps, indent+1)
-
-    deps.append(package)
-
-
 def get_hg_id_date(path):
     """Get short identifier and date of current commit from given Mercurial repository
 
@@ -591,103 +576,72 @@
     tmp_dir = Path(tmp_dirname)
     env_dir = tmp_dir / 'env'
     pip = env_dir / 'bin' / 'pip'
-    download_dir = tmp_dir / 'archives'
-    if download_dir.exists() and env_dir.exists():
-        print("dependencies are already downloaded and packages are already "
-              "installed")
+    if env_dir.exists():
+        print("packages are already installed")
     else:
-        download_dir.mkdir()
         print(f"working in temporary directory {tmp_dirname}")
         venv_cplted = subprocess.run(["/usr/bin/env", "virtualenv2", env_dir])
         venv_cplted.check_returncode()
-        print("\ndownloading packages")
-        command_args = [pip, "download", "-d", download_dir,
-                        "--progress-bar", "emoji"]
-        if args.no_binary:
-            command_args.extend(["--no-binary", ":all:"])
-        command_args.append(main_package_source)
-        down_cplted = subprocess.run(command_args)
-        down_cplted.check_returncode()
         print("\ninstalling package\n")
+        if PYTHON_DEP_PREINSTALL:
+            print("preinstalling packages")
+            for dep in PYTHON_DEP_PREINSTALL:
+                inst_cplted = subprocess.run([pip, "install", dep])
+                inst_cplted.check_returncode()
+
         # we install package to have a good map of requirements, pypi metadata
         # are incomplete. Packages should be retrieved from pip cache
-        inst_cplted = subprocess.run([pip, "install", main_package_source])
+        inst_cplted = subprocess.run(
+            [pip, "install", "--progress-bar", "emoji", main_package_source])
         inst_cplted.check_returncode()
 
     print_step("analyzing python dependencies")
     deps_map = {}
 
-    for archive_path in download_dir.iterdir():
-        name, right_part = archive_path.name.rsplit('-', 1)
-        name_canonical = canonical(name)
-        if right_part.endswith('.tar.gz'):
-            version = right_part[:-7]
-        elif right_part.endswith('.tar.bz2'):
-            version = right_part[:-8]
-        elif right_part.endswith('.zip'):
-            version = right_part[:-4]
-        else:
-            raise ValueError(
-                f"Archive are supposed to be .tar.gz, .tar.bz2 or .zip archives, but "
-                f"file found is {archive_path.name}, did something change on Pypy?")
-        with open(archive_path, "rb") as f:
-            dep_hash = hashlib.sha256(f.read()).hexdigest()
+    requirements = get_requirements(pip, main_package)
+
+
+    print("\nretrieveing metadata on PyPi\n")
+
+    if not args.no_binary:
+        print(r"/!\ retrieving binary packages is not currently supported")
+
+    deps = []
+    for name, version, pkg_requirements in requirements:
+        print(f"{name}: ", end='')
         r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json")
         r.raise_for_status()
         dep_json = r.json()
         release_json = dep_json["releases"][version]
-        try:
-            version_json = next(v for v in release_json
-                                if v['digests']['sha256'] == dep_hash)
-        except IndexError:
-            raise ValueError(f"Can't find the version we downloaded for {name}")
-
-        requirements = get_requirements(pip, name_canonical)
-
-        dep = Package(name=name_canonical,
+        source_release = next(r for r in release_json if r['packagetype'] == 'sdist')
+        dep_hash = source_release['digests']['sha256']
+        url = source_release['url']
+        dep = Package(name=name,
                       version=version,
                       hash_=dep_hash,
-                      url=version_json['url'],
-                      requirements=requirements,
+                      url=url,
+                      requirements=pkg_requirements,
                       )
-
-        deps_map[name_canonical] = dep
-        print(f"found dependency: {dep.name} {dep.version}")
-
-    print_step("ordering dependencies")
-
-    requirements = get_requirements(pip, main_package)
-    main_pack = Package(name=main_package,
-                        version=args.version,
-                        hash_="",
-                        url="",
-                        requirements=requirements,
-                        )
-
-    deps = []
-    print("resolving requirements\n")
-    resolve_requirements(main_pack, deps_map, deps)
-    missing_deps = set(deps_map.values()).difference(deps)
-    if missing_deps:
-        print("\n/!\\ some dependencies are missing this should not happen! "
-              "Adding them\n")
-        print("additional requirements")
-        for pack in missing_deps:
-            resolve_requirements(pack, deps_map, deps)
+        deps.append(dep)
+        deps_map[name] = dep
+        print(f"version: {version}, type: {source_release['packagetype']}")
 
     # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS
     for name in PYTHON_SETUP_REQUIREMENTS:
+        if not isinstance(name, str):
+            # this is a package with version in a list, first item is the name
+            name = name[0]
        package_data = deps_map.get(name)
        if package_data is not None:
            deps.remove(package_data)
            print(f"removed {name} which is already a setup requirement")
 
-    # main package is installed at the end
-    deps.remove(main_pack)
+    # main package is installed at the end of the manifest
+    # so we remove it (it is the last package in deps)
+    main_pack = deps.pop()
+    assert main_pack.name == main_package
 
-    print("\npackages are now ordered: {}".format(", ".join(d.name for d in deps)))
-
-    print("\nwe now generate modules for python dependencies")
+    print("\nwe now generate modules for python dependencies\n")
     data = []
     version_force = {
         canonical(n): v
@@ -876,13 +830,13 @@
     if releases_elt is None:
         raise ValueError(
            "<releases> element is missing in appdata template, please add it")
-    release_elt = etree.SubElement(
-        releases_elt,
+    release_elt = etree.Element(
         "release",
         {'type': 'development',
          'version': dev_version_rev,
          'date': dev_version_date},
     )
+    releases_elt.insert(0, release_elt)
     description_elt = etree.SubElement(release_elt, 'description')
     text_lines = APPDATA_RELEASE_DEV_TEXT.strip().split('\n')
     for text in text_lines:
@@ -970,14 +924,15 @@
 
     manifest = {}
     manifest['app-id'] = app_id
-    manifest['default-branch'] = args.version
+    if args.version == 'dev':
+        manifest['default-branch'] = args.version
     # we update DEFAULT_MANIFEST only now to have "app-id" and "default-branch" on the top
     # of the manifest, also we don't want modules to be set now
     default_modules = DEFAULT_MANIFEST.pop('modules', [])
     manifest.update(DEFAULT_MANIFEST)
     manifest.update(template)
 
-    modules.extend(get_libxslt())
+    # modules.extend(get_libxslt())
 
     # setup requirements
     modules.extend(get_python_package(
@@ -995,7 +950,7 @@
     manifest['modules'] = modules
 
     # sat common things
-    existing_modules = {d['name'] for d in modules}
+    existing_modules = {d['name'] for d in modules if not isinstance(d, str)}
     if "sat_templates" not in existing_modules:
         modules.extend(get_python_package("sat_templates", with_pre=True))
     modules.extend(get_sat_media())
@@ -1041,6 +996,8 @@
     ]
 
     for m in modules:
+        if isinstance(m, str):
+            continue
         for s in m.get('sources', []):
             s_type = s.get('type')
             if s_type in ('path', 'file'):
@@ -1092,4 +1049,4 @@
     else:
         shutil.copytree(get_cache_dir(), dest)
 
-    print("\nexport completed")
+    print("\nexport completed")