sat_docs: comparison flatpak/build_manifest.py @ 161:4fe61dcd856e
flatpak: update build-manifest to retrieve dependencies without an initial download phase:
- dependencies are now retrieved entirely from the installation, and requirements are built
from there (a minimal sketch of this retrieval follows this description). The source package
is always used for now, and PYTHON_DEP_PREINSTALL can be used to add extra packages or to pin
a specific version of a package.
- fixed insertion of `release_elt` so the dev version release lands at the right location
(a standalone sketch follows the comparison table below).
- `branch` is no longer set for stable versions.
- libxslt is no longer added, as it is part of the freedesktop runtime.
- flathub shared-modules is now used for Python 2.
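To illustrate the retrieval approach described in the first point, here is a minimal, hypothetical sketch. It assumes a virtualenv whose pip already has the main package installed; the `/tmp/flatpak_env/env/bin/pip` path and the `sat` package name are placeholders, not values taken from the changeset. It parses `pip show` output and orders requirements so that every dependency appears before the package that needs it, in the spirit of the new get_requirements():

```python
import subprocess
from pathlib import Path

# assumed: virtualenv in which the main package was installed beforehand (placeholder path)
PIP = Path("/tmp/flatpak_env/env/bin/pip")

def canonical(name):
    # rough canonicalisation for the example; the real script has its own canonical() helper
    return name.strip().lower().replace("_", "-")

def requirements_of(package_name):
    """Parse the "Requires:" line of `pip show` for an installed package."""
    show = subprocess.run([PIP, "show", package_name],
                          capture_output=True, text=True, check=True)
    for line in show.stdout.splitlines():
        if line.startswith("Requires: "):
            raw = line[len("Requires: "):]
            return [canonical(p) for p in raw.split(",") if p.strip()]
    return []

def ordered_requirements(package_name, done=None, ordered=None):
    """Post-order walk: every dependency is listed before the package that requires it."""
    if done is None:
        done, ordered = set(), []
    done.add(package_name)
    for req in requirements_of(package_name):
        if req not in done:
            ordered_requirements(req, done, ordered)
    ordered.append(package_name)
    return ordered

if __name__ == "__main__":
    # the main package ends up last, so it can be popped off before generating modules
    print(ordered_requirements(canonical("sat")))
```

The changeset's get_requirements() additionally records each package's version from `pip show`, merges extra requirements from PYTHON_DEP_REQUIREMENTS_UPDATE, and prints the resulting dependency tree.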
author | Goffi <goffi@goffi.org> |
---|---|
date | Fri, 23 Aug 2019 16:41:05 +0200 |
parents | 150085e39072 |
children | a213053a03be |
160:4a5f9daa0ce0 | 161:4fe61dcd856e |
---|---|
56 } | 56 } |
57 } | 57 } |
58 } | 58 } |
59 }, | 59 }, |
60 } | 60 } |
61 # FIXME: Q&D way to install older version of twisted for Flatpak | |
62 PYTHON_DEP_PREINSTALL = ['twisted<19.7.0'] | |
61 PYTHON_DEP_REQUIREMENTS_UPDATE = { | 63 PYTHON_DEP_REQUIREMENTS_UPDATE = { |
62 # service-identity is not seen as a twisted requirement, so it's sometimes misplaced | 64 # service-identity is not seen as a twisted requirement, so it's sometimes misplaced |
63 'twisted': ['service-identity'], | 65 'twisted': ['service-identity'], |
64 } | 66 } |
65 PYTHON_SETUP_REQUIREMENTS = [ | 67 PYTHON_SETUP_REQUIREMENTS = [ |
78 "--socket=session-bus", | 80 "--socket=session-bus", |
79 "--share=network", | 81 "--share=network", |
80 "--filesystem=home" | 82 "--filesystem=home" |
81 ], | 83 ], |
82 "modules": [ | 84 "modules": [ |
83 { | 85 "shared-modules/python2.7/python-2.7.json", |
84 "name": "python", | |
85 # we use the same options as the ones of Python 2 from SDK | |
86 "config-opts": [ | |
87 "--enable-deterministic-archives", | |
88 "--disable-static", | |
89 "--enable-shared", | |
90 "--with-ensurepip=yes", | |
91 "--with-system-expat", | |
92 "--with-system-ffi", | |
93 "--enable-loadable-sqlite-extensions", | |
94 "--with-dbmliborder=gdbm", | |
95 "--enable-unicode=ucs4", | |
96 ], | |
97 "post-install": [ | |
98 # stripping won't work without this | |
99 "chmod 644 /app/lib/libpython2.7.so.1.0" | |
100 ], | |
101 "sources": [ | |
102 { | |
103 "type": "archive", | |
104 "url": "https://www.python.org/ftp/python/2.7.16/Python-2.7.16.tar.xz", | |
105 "sha256": "f222ef602647eecb6853681156d32de4450a2c39f4de93bd5b20235f2e660ed7" | |
106 } | |
107 ] | |
108 } | |
109 ] | 86 ] |
110 } | 87 } |
111 SHOW_REQUIRES_HEADER = 'Requires: ' | 88 SHOW_REQUIRES_HEADER = 'Requires: ' |
89 SHOW_VERSION_HEADER = 'Version: ' | |
112 SETTINGS_KEY = '_build_settings' | 90 SETTINGS_KEY = '_build_settings' |
113 APPDATA_RELEASE_DEV_TEXT = dedent("""\ | 91 APPDATA_RELEASE_DEV_TEXT = dedent("""\ |
114 This is a development version, used as a preview. | 92 This is a development version, used as a preview. |
115 Please note that it is incomplete and it probably contains bugs. | 93 Please note that it is incomplete and it probably contains bugs. |
116 """) | 94 """) |
125 url: str | 103 url: str |
126 requirements: List[str] | 104 requirements: List[str] |
127 | 105 |
128 def __hash__(self): | 106 def __hash__(self): |
129 return hash(self.name) | 107 return hash(self.name) |
108 | |
109 def __eq__(self, other): | |
110 return self.name == other.name | |
130 | 111 |
131 | 112 |
132 def print_step(step): | 113 def print_step(step): |
133 print() | 114 print() |
134 print("┌" + "─" * (len(step) + 2) + "┐") | 115 print("┌" + "─" * (len(step) + 2) + "┐") |
448 data = [dep_data] | 429 data = [dep_data] |
449 set_cache(step_name, data) | 430 set_cache(step_name, data) |
450 return data | 431 return data |
451 | 432 |
452 | 433 |
453 def get_requirements(pip, package_name): | 434 def get_requirements(pip, package_name, done=None, requirements=None, tree=None, indent=0): |
454 """Retrieve requirements for an installed python package | 435 """Retrieve requirements for an installed python package |
455 | 436 |
456 @param pip(Path): path to pip executable to use | 437 @param pip(Path): path to pip executable to use |
457 package must be installed in the environment of this pip | 438 package must be installed in the environment of this pip |
458 @param package_name(str): name of the package to retrieve | 439 @param package_name(str): name of the package to retrieve |
459 @return(list[str]): found requirements | 440 @return(list[(str,str)]): list of ordered requirements (package_name, version) |
460 """ | 441 """ |
442 print(f"found dependency: {package_name}") | |
443 if requirements is None: | |
444 assert done is None | |
445 assert tree is None | |
446 if not package_name == canonical(package_name): | |
447 raise ValueError("main package name is not canonical") | |
448 tree = [] | |
449 requirements = [] | |
450 done = [] | |
451 main = True | |
452 else: | |
453 main = False | |
454 done.append(package_name) | |
461 show_cplted = subprocess.run([pip, "show", package_name], | 455 show_cplted = subprocess.run([pip, "show", package_name], |
462 capture_output=True, text=True) | 456 capture_output=True, text=True) |
463 show_cplted.check_returncode() | 457 show_cplted.check_returncode() |
464 | 458 |
465 lines = show_cplted.stdout.split('\n') | 459 lines = show_cplted.stdout.split('\n') |
460 version_raw = next(l for l in lines if l.startswith(SHOW_VERSION_HEADER)) | |
461 version = version_raw[len(SHOW_VERSION_HEADER):] | |
466 requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER)) | 462 requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER)) |
467 requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):] | 463 requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):] |
468 requirements = [canonical(p) for p in requirement_raw.split(',') if p.strip()] | 464 requirements_canon = [canonical(p) for p in requirement_raw.split(',') if p.strip()] |
469 requirements_update = [ | 465 requirements_update = [ |
470 canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])] | 466 canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])] |
471 new_requirements = set(requirements_update).difference(requirements) | 467 new_requirements = set(requirements_update).difference(requirements_canon) |
472 if new_requirements: | 468 if new_requirements: |
473 print("adding extra requirements to {}: {}".format( | 469 print("adding extra requirements to {}: {}".format( |
474 package_name, ', '.join(new_requirements))) | 470 package_name, ', '.join(new_requirements))) |
475 requirements.extend(new_requirements) | 471 requirements_canon.extend(new_requirements) |
472 tree.append(f"{' '*indent}{package_name} ({version})") | |
473 for r in requirements_canon: | |
474 if r not in done: | |
475 get_requirements(pip, r, done, requirements, tree, indent+1) | |
476 req_tuple = (package_name, version, requirements_canon) | |
477 requirements.append(req_tuple) | |
478 if main: | |
479 print("\ndependency tree:\n") | |
480 print('\n'.join(tree)) | |
476 return requirements | 481 return requirements |
477 | |
478 | |
479 def resolve_requirements(package, deps_map, deps, indent=0): | |
480 """Recursively resolve requirements | |
481 | |
482 @param package(Package): retrieve dependencies of this package | |
483 @param deps_map(dict): map from dependency name to Package instance | |
484 @param deps(list[package]): ordered dependencies | |
485 this list is updated in place | |
486 @param indent(int): use internally to print dependencies tree | |
487 """ | |
488 if package in deps: | |
489 return | |
490 print(" " * indent + package.name) | |
491 reqs_data = [deps_map[r] for r in package.requirements] | |
492 | |
493 for data in reqs_data: | |
494 resolve_requirements(data, deps_map, deps, indent+1) | |
495 | |
496 deps.append(package) | |
497 | 482 |
498 | 483 |
499 def get_hg_id_date(path): | 484 def get_hg_id_date(path): |
500 """Get short identifier and date of current commit from given Mercurial repository | 485 """Get short identifier and date of current commit from given Mercurial repository |
501 | 486 |
589 # we ignore the created temporary directory if we have another one specified | 574 # we ignore the created temporary directory if we have another one specified |
590 tmp_dirname = args.deps_dir | 575 tmp_dirname = args.deps_dir |
591 tmp_dir = Path(tmp_dirname) | 576 tmp_dir = Path(tmp_dirname) |
592 env_dir = tmp_dir / 'env' | 577 env_dir = tmp_dir / 'env' |
593 pip = env_dir / 'bin' / 'pip' | 578 pip = env_dir / 'bin' / 'pip' |
594 download_dir = tmp_dir / 'archives' | 579 if env_dir.exists(): |
595 if download_dir.exists() and env_dir.exists(): | 580 print("packages are already installed") |
596 print("dependencies are already downloaded and packages are already " | |
597 "installed") | |
598 else: | 581 else: |
599 download_dir.mkdir() | |
600 print(f"working in temporary directory {tmp_dirname}") | 582 print(f"working in temporary directory {tmp_dirname}") |
601 venv_cplted = subprocess.run(["/usr/bin/env", "virtualenv2", env_dir]) | 583 venv_cplted = subprocess.run(["/usr/bin/env", "virtualenv2", env_dir]) |
602 venv_cplted.check_returncode() | 584 venv_cplted.check_returncode() |
603 print("\ndownloading packages") | |
604 command_args = [pip, "download", "-d", download_dir, | |
605 "--progress-bar", "emoji"] | |
606 if args.no_binary: | |
607 command_args.extend(["--no-binary", ":all:"]) | |
608 command_args.append(main_package_source) | |
609 down_cplted = subprocess.run(command_args) | |
610 down_cplted.check_returncode() | |
611 print("\ninstalling package\n") | 585 print("\ninstalling package\n") |
586 if PYTHON_DEP_PREINSTALL: | |
587 print("preinstalling packages") | |
588 for dep in PYTHON_DEP_PREINSTALL: | |
589 inst_cplted = subprocess.run([pip, "install", dep]) | |
590 inst_cplted.check_returncode() | |
591 | |
612 # we install package to have a good map of requirements, pypi metadata | 592 # we install package to have a good map of requirements, pypi metadata |
613 # are incomplete. Packages should be retrieved from pip cache | 593 # are incomplete. Packages should be retrieved from pip cache |
614 inst_cplted = subprocess.run([pip, "install", main_package_source]) | 594 inst_cplted = subprocess.run( |
595 [pip, "install", "--progress-bar", "emoji", main_package_source]) | |
615 inst_cplted.check_returncode() | 596 inst_cplted.check_returncode() |
616 | 597 |
617 print_step("analyzing python dependencies") | 598 print_step("analyzing python dependencies") |
618 deps_map = {} | 599 deps_map = {} |
619 | 600 |
620 for archive_path in download_dir.iterdir(): | 601 requirements = get_requirements(pip, main_package) |
621 name, right_part = archive_path.name.rsplit('-', 1) | 602 |
622 name_canonical = canonical(name) | 603 |
623 if right_part.endswith('.tar.gz'): | 604 print("\nretrieving metadata on PyPI\n") |
624 version = right_part[:-7] | 605 |
625 elif right_part.endswith('.tar.bz2'): | 606 if not args.no_binary: |
626 version = right_part[:-8] | 607 print(r"/!\ retrieving binary packages is not currently supported") |
627 elif right_part.endswith('.zip'): | 608 |
628 version = right_part[:-4] | 609 deps = [] |
629 else: | 610 for name, version, pkg_requirements in requirements: |
630 raise ValueError( | 611 print(f"{name}: ", end='') |
631 f"Archive are supposed to be .tar.gz, .tar.bz2 or .zip archives, but " | |
632 f"file found is {archive_path.name}, did something change on Pypy?") | |
633 with open(archive_path, "rb") as f: | |
634 dep_hash = hashlib.sha256(f.read()).hexdigest() | |
635 r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json") | 612 r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json") |
636 r.raise_for_status() | 613 r.raise_for_status() |
637 dep_json = r.json() | 614 dep_json = r.json() |
638 release_json = dep_json["releases"][version] | 615 release_json = dep_json["releases"][version] |
639 try: | 616 source_release = next(r for r in release_json if r['packagetype'] == 'sdist') |
640 version_json = next(v for v in release_json | 617 dep_hash = source_release['digests']['sha256'] |
641 if v['digests']['sha256'] == dep_hash) | 618 url = source_release['url'] |
642 except IndexError: | 619 dep = Package(name=name, |
643 raise ValueError(f"Can't find the version we downloaded for {name}") | |
644 | |
645 requirements = get_requirements(pip, name_canonical) | |
646 | |
647 dep = Package(name=name_canonical, | |
648 version=version, | 620 version=version, |
649 hash_=dep_hash, | 621 hash_=dep_hash, |
650 url=version_json['url'], | 622 url=url, |
651 requirements=requirements, | 623 requirements=pkg_requirements, |
652 ) | 624 ) |
653 | 625 deps.append(dep) |
654 deps_map[name_canonical] = dep | 626 deps_map[name] = dep |
655 print(f"found dependency: {dep.name} {dep.version}") | 627 print(f"version: {version}, type: {source_release['packagetype']}") |
656 | |
657 print_step("ordering dependencies") | |
658 | |
659 requirements = get_requirements(pip, main_package) | |
660 main_pack = Package(name=main_package, | |
661 version=args.version, | |
662 hash_="", | |
663 url="", | |
664 requirements=requirements, | |
665 ) | |
666 | |
667 deps = [] | |
668 print("resolving requirements\n") | |
669 resolve_requirements(main_pack, deps_map, deps) | |
670 missing_deps = set(deps_map.values()).difference(deps) | |
671 if missing_deps: | |
672 print("\n/!\\ some dependencies are missing this should not happen! " | |
673 "Adding them\n") | |
674 print("additional requirements") | |
675 for pack in missing_deps: | |
676 resolve_requirements(pack, deps_map, deps) | |
677 | 628 |
678 # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS | 629 # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS |
679 for name in PYTHON_SETUP_REQUIREMENTS: | 630 for name in PYTHON_SETUP_REQUIREMENTS: |
631 if not isinstance(name, str): | |
632 # this is a package with version in a list, first item is the name | |
633 name = name[0] | |
680 package_data = deps_map.get(name) | 634 package_data = deps_map.get(name) |
681 if package_data is not None: | 635 if package_data is not None: |
682 deps.remove(package_data) | 636 deps.remove(package_data) |
683 print(f"removed {name} which is already a setup requirement") | 637 print(f"removed {name} which is already a setup requirement") |
684 | 638 |
685 # main package is installed at the end | 639 # main package is installed at the end of the manifest |
686 deps.remove(main_pack) | 640 # so we remove it (it is the last package in deps) |
687 | 641 main_pack = deps.pop() |
688 print("\npackages are now ordered: {}".format(", ".join(d.name for d in deps))) | 642 assert main_pack.name == main_package |
689 | 643 |
690 print("\nwe now generate modules for python dependencies") | 644 print("\nwe now generate modules for python dependencies\n") |
691 data = [] | 645 data = [] |
692 version_force = { | 646 version_force = { |
693 canonical(n): v | 647 canonical(n): v |
694 for n,v in build_settings.get('python_deps_version_force', {}).items() | 648 for n,v in build_settings.get('python_deps_version_force', {}).items() |
695 } | 649 } |
874 print(f"adding release data for dev version ({dev_version_rev})") | 828 print(f"adding release data for dev version ({dev_version_rev})") |
875 releases_elt = root.find('releases') | 829 releases_elt = root.find('releases') |
876 if releases_elt is None: | 830 if releases_elt is None: |
877 raise ValueError( | 831 raise ValueError( |
878 "<releases/> element is missing in appdata template, please add it") | 832 "<releases/> element is missing in appdata template, please add it") |
879 release_elt = etree.SubElement( | 833 release_elt = etree.Element( |
880 releases_elt, | |
881 "release", | 834 "release", |
882 {'type': 'development', | 835 {'type': 'development', |
883 'version': dev_version_rev, | 836 'version': dev_version_rev, |
884 'date': dev_version_date}, | 837 'date': dev_version_date}, |
885 ) | 838 ) |
839 releases_elt.insert(0, release_elt) | |
886 description_elt = etree.SubElement(release_elt, 'description') | 840 description_elt = etree.SubElement(release_elt, 'description') |
887 text_lines = APPDATA_RELEASE_DEV_TEXT.strip().split('\n') | 841 text_lines = APPDATA_RELEASE_DEV_TEXT.strip().split('\n') |
888 for text in text_lines: | 842 for text in text_lines: |
889 etree.SubElement(description_elt, 'p').text = text | 843 etree.SubElement(description_elt, 'p').text = text |
890 | 844 |
968 else: | 922 else: |
969 main_package_source = main_package | 923 main_package_source = main_package |
970 | 924 |
971 manifest = {} | 925 manifest = {} |
972 manifest['app-id'] = app_id | 926 manifest['app-id'] = app_id |
973 manifest['default-branch'] = args.version | 927 if args.version == 'dev': |
928 manifest['default-branch'] = args.version | |
974 # we update DEFAULT_MANIFEST only now to have "app-id" and "default-branch" on the top | 929 # we update DEFAULT_MANIFEST only now to have "app-id" and "default-branch" on the top |
975 # of the manifest, also we don't want modules to be set now | 930 # of the manifest, also we don't want modules to be set now |
976 default_modules = DEFAULT_MANIFEST.pop('modules', []) | 931 default_modules = DEFAULT_MANIFEST.pop('modules', []) |
977 manifest.update(DEFAULT_MANIFEST) | 932 manifest.update(DEFAULT_MANIFEST) |
978 manifest.update(template) | 933 manifest.update(template) |
979 | 934 |
980 modules.extend(get_libxslt()) | 935 # modules.extend(get_libxslt()) |
981 | 936 |
982 # setup requirements | 937 # setup requirements |
983 modules.extend(get_python_package( | 938 modules.extend(get_python_package( |
984 PYTHON_SETUP_REQUIREMENTS, | 939 PYTHON_SETUP_REQUIREMENTS, |
985 step_name=f"setup_requirements__{app_id}", | 940 step_name=f"setup_requirements__{app_id}", |
993 modules.extend(manifest.get('modules', [])) | 948 modules.extend(manifest.get('modules', [])) |
994 modules = default_modules + modules | 949 modules = default_modules + modules |
995 manifest['modules'] = modules | 950 manifest['modules'] = modules |
996 | 951 |
997 # sat common things | 952 # sat common things |
998 existing_modules = {d['name'] for d in modules} | 953 existing_modules = {d['name'] for d in modules if not isinstance(d, str)} |
999 if "sat_templates" not in existing_modules: | 954 if "sat_templates" not in existing_modules: |
1000 modules.extend(get_python_package("sat_templates", with_pre=True)) | 955 modules.extend(get_python_package("sat_templates", with_pre=True)) |
1001 modules.extend(get_sat_media()) | 956 modules.extend(get_sat_media()) |
1002 modules.extend(file_upload('sat.conf', '/app')) | 957 modules.extend(file_upload('sat.conf', '/app')) |
1003 | 958 |
1039 to_export = [ | 994 to_export = [ |
1040 ("package file", package_file), | 995 ("package file", package_file), |
1041 ] | 996 ] |
1042 | 997 |
1043 for m in modules: | 998 for m in modules: |
999 if isinstance(m, str): | |
1000 continue | |
1044 for s in m.get('sources', []): | 1001 for s in m.get('sources', []): |
1045 s_type = s.get('type') | 1002 s_type = s.get('type') |
1046 if s_type in ('path', 'file'): | 1003 if s_type in ('path', 'file'): |
1047 try: | 1004 try: |
1048 path = s['path'] | 1005 path = s['path'] |
1090 if args.symlink in ('all', 'cache'): | 1047 if args.symlink in ('all', 'cache'): |
1091 os.symlink(get_cache_dir().resolve(), dest) | 1048 os.symlink(get_cache_dir().resolve(), dest) |
1092 else: | 1049 else: |
1093 shutil.copytree(get_cache_dir(), dest) | 1050 shutil.copytree(get_cache_dir(), dest) |
1094 | 1051 |
1095 print("\nexport completed") | 1052 print("\nexport completed") |
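For reference, the `release_elt` fix mentioned in the description boils down to building the element separately and inserting it as the first child of `<releases/>` instead of appending it with SubElement. Here is a small standalone sketch using the standard-library ElementTree; the appdata fragment and version values below are invented for the example:

```python
from xml.etree import ElementTree as etree

# invented appdata fragment, just enough to demonstrate the insertion
APPDATA = """\
<component>
  <releases>
    <release type="stable" version="0.7.0" date="2019-08-13"/>
  </releases>
</component>
"""

root = etree.fromstring(APPDATA)
releases_elt = root.find('releases')
if releases_elt is None:
    raise ValueError("<releases/> element is missing in appdata template, please add it")

# build the <release/> element on its own, then insert it at index 0 so the
# development snapshot is listed before the existing stable releases
release_elt = etree.Element(
    "release",
    {'type': 'development', 'version': '0.8.0.dev0', 'date': '2019-08-23'},
)
releases_elt.insert(0, release_elt)

description_elt = etree.SubElement(release_elt, 'description')
etree.SubElement(description_elt, 'p').text = "This is a development version, used as a preview."

print(etree.tostring(root, encoding='unicode'))
```

With SubElement the development release ended up after the existing stable entries; insert(0, ...) puts it first, so the newest (development) release is listed at the top of `<releases/>`.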