comparison flatpak/build_manifest.py @ 138:274af514a5cf

flatpak: reworked packages + added a build script: the new `build_manifest.py` script can now be used to generate flatpak manifests for every frontend of SàT. The manifests can be used both for development and stable versions. Template files (in the form `_tpl_<app-id>.json`) are used to set build instructions. A common runtime specific to SàT has been abandoned following a discussion on the official mailing list. A small wrapper is now used to launch the backend automatically if it's not found. Desktop and app metadata have been added for Cagou; Jp and Primitivus don't have appdata and desktop files yet.
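For instance, `python3 build_manifest.py cagou dev` (a hypothetical invocation, following the argument parser below) would read `_tpl_org.salutatoi.Cagou.json` and write the manifest to `org.salutatoi.Cagou.json`.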
author Goffi <goffi@goffi.org>
date Sat, 22 Jun 2019 15:59:07 +0200
parents
children d36a68e396d5
#!/usr/bin/env python3

import tempfile
import subprocess
from pathlib import Path
from typing import List
from dataclasses import dataclass
import hashlib
from ftplib import FTP
from urllib.parse import urlparse
import sys
import os
import json
import time
import argparse
import shutil
from packaging.version import parse as parse_version
import requests


CACHE_LIMIT = 3600 * 24
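# results of the steps below are cached for up to CACHE_LIMIT seconds
# PYTHON_DEP_OVERRIDE holds per-package tweaks merged into the generated python
# modules; a key suffixed with "_extend" extends the module's existing list
# instead of replacing it (see get_python_deps)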
PYTHON_DEP_OVERRIDE = {
    "lxml": {
        "build-commands": [
            "python2 ./setup.py install --prefix=${FLATPAK_DEST} --optimize=1"
        ],
        "build-options": {
            "env": {
                "XSLT_CONFIG": "pkg-config libxslt"
            }
        },
    },
    "dbus-python": {
        "build-commands": [
            "python2 setup.py build",
            "python2 setup.py install --prefix=${FLATPAK_DEST}",
        ]
    },
    "urwid": {
        "sources_extend": [
            {
                "type": "patch",
                "path": "main_loop.patch"
            }
        ]
    },
    "kivy": {
        "post-install": [
            # this file is not copied normally when installing with
            # `python2 setup.py install`.
            # TODO: report upstream
            "cp kivy/setupconfig.py /app/lib/python2.7/site-packages/Kivy-*.egg/kivy/"
        ]
    },
}
PYTHON_DEP_REQUIREMENTS_UPDATE = {
    # service-identity is not seen as a twisted requirement, so it's sometimes misplaced
    'twisted': ['service-identity'],
}
PYTHON_SETUP_REQUIREMENTS = [
    'setuptools',  # to have an up-to-date version
    'setuptools_scm',
    'docutils',  # needed by m2r
    'mistune',  # needed by m2r
    'm2r',  # needed by automat
]
DEFAULT_MANIFEST = {
    "runtime": "org.freedesktop.Platform",
    "runtime-version": "1.6",
    "sdk": "org.freedesktop.Sdk",
    "sdk-extensions": [
        "org.freedesktop.Sdk.Debug",
        "org.freedesktop.Sdk.Locale",
        "org.freedesktop.Sdk.Docs"
    ],
    "platform-extensions": [
        "org.freedesktop.Platform.Locale"
    ],
    "command": "sat_wrapper",
    "finish-args": [
        "--socket=session-bus",
        "--share=network",
        "--filesystem=home"
    ]
}
SHOW_REQUIRES_HEADER = 'Requires: '
SETTINGS_KEY = '_build_settings'


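# a resolved Python dependency; __hash__ relies on the name only, so instances
# can be deduplicated by package name in sets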
@dataclass
class Package:
    name: str
    version: str
    hash_: str
    url: str
    requirements: List[str]

    def __hash__(self):
        return hash(self.name)


def print_step(step):
    print()
    print("┌" + "─" * (len(step) + 2) + "┐")
    print("│ {} │".format(step))
    print("└" + "─" * (len(step) + 2) + "┘")
    print()

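# e.g. print_step("foo") draws a box around the step name:
#
#     ┌─────┐
#     │ foo │
#     └─────┘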

def parse_args():
    parser = argparse.ArgumentParser(
        description='Flatpak manifest builder for Salut à Toi')

    build_group = parser.add_argument_group('building', 'options used to generate the manifest')
    export_group = parser.add_argument_group('export', 'options used to export build files')

    # build group
    build_group.add_argument('-f', '--force', action="store_true",
                             help="force overwriting of existing manifest")
    build_group.add_argument('--ignore-cache', action='append', default=[],
                             help='ignore the cache of this step ("all" to ignore all caches)')
    build_group.add_argument(
        '--deps-dir',
        help="use this directory to build python dependencies (it won't be deleted at "
             "the end, useful when you want to re-use it and avoid re-downloading)")
    build_group.add_argument('--no-binary', action="store_true",
                             help="don't use binary packages")

    # export group
    export_group.add_argument('-s', '--symlink', choices=['no', 'all', 'cache'],
                              default='no',
                              help='"no" to copy all files, "all" to symlink all files, "cache" to copy files '
                                   'and symlink only cache (default: "no")')
    export_group.add_argument('-F', '--force-export', action='store_true',
                              help='force overwriting of existing files/symlinks when exporting (DEFAULT: '
                                   'existing files are skipped)')
    export_group.add_argument('-e', '--export', type=Path,
                              help="export build files to this directory (DEFAULT: don't export files)")

    # common
    parser.add_argument('name', type=str, help="name of the package to build")
    parser.add_argument('version', type=str, help="version of the package to build")

    args = parser.parse_args()

    if 'all' in args.ignore_cache and len(args.ignore_cache) != 1:
        parser.error('"all" cannot be used with step names in --ignore-cache, '
                     'use it alone')

    if args.export is not None and not args.export.is_dir():
        parser.error(f"{args.export} is not a directory!")

    return args


## useful methods ##

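# the cache file is a plain JSON mapping, roughly:
#     {"<step name>": {"updated": <unix timestamp>, "data": [<manifest modules>]}}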
def get_cache(name):
    """Retrieve cache for a step

    cache will be ignored if too old, or explicitly ignored by user
    @param name(str): name of the step
    @return (object): cached data
    """
    # name of the cache without the suffix, mainly used for python_deps
    shortname = name.split('__', 1)[0]

    if shortname in args.ignore_cache or 'all' in args.ignore_cache:
        print(f"ignoring cache for {shortname}")
        if shortname in args.ignore_cache:
            args.ignore_cache.remove(shortname)
        return None
    try:
        cache_name = cache[name]
        cache_updated = cache_name['updated']
    except KeyError:
        return None
    if time.time() - cache_updated > CACHE_LIMIT:
        print(f"cache for {name} is too old, we won't use it")
        return None
    print(f"cache found for {shortname}")
    return cache_name['data']


def set_cache(name, data):
    """Update cache for a step

    @param name(str): name of the step
    @param data(object): data to cache
    """
    cache_name = cache.setdefault(name, {})
    cache_name['updated'] = time.time()
    cache_name['data'] = data
    with cache_path.open('w') as f:
        json.dump(cache, f)


def get_python_package(package_name, step_name=None, step_message=None, with_pre=False,
                       version=None):
    """Generate module for a Python package

    @param package_name(str, list[str]): name of the Python package
        use list of str to get several packages at once
    @param step_name(None, str): name of the step, None to use package_name
    @param step_message(None, str): message of the step (None to use default one)
        use empty string to disable it
    @param with_pre(bool): if True retrieve also pre-releases
    @param version(str, None): indicate the version of the package to download
        if None, the most recent version compatible with `with_pre` will be used
    """
    single = isinstance(package_name, str)
    if step_name is None:
        step_name = package_name if single else ' - '.join(package_name)
    if step_message != "":
        print_step(step_message or f"retrieving latest version of {package_name}")
    cache = get_cache(step_name)
    if cache is not None:
        return cache
    package_names = [package_name] if single else package_name
    data = []
    for name in package_names:
        r = requests.get(f"https://pypi.org/pypi/{name}/json")
        r.raise_for_status()

        releases_data = []
        for version_raw, release_data in r.json()["releases"].items():
            if version is not None and version != version_raw:
                continue
            release_version = parse_version(version_raw)
            if release_version.is_prerelease and not with_pre:
                continue
            releases_data.append((release_version, release_data))

        # we sort releases by version to be sure to have the latest one
        releases_data.sort(key=lambda d: d[0])

        try:
            release_version, release_data = releases_data[-1]
            package = next(p for p in release_data if p["packagetype"] == "sdist")
        except (IndexError, StopIteration):
            raise RuntimeError(f"Can't find a matching package for {name}")

        print(f"{name} {release_version} will be used")
        dep_data = {
            "name": name,
            "buildsystem": "simple",
            "build-commands": [
                "python2 setup.py install --prefix=${FLATPAK_DEST}"
            ],
            "sources": [
                {
                    "type": "archive",
                    "url": package["url"],
                    "sha256": package["digests"]["sha256"],
                }
            ],
            "ensure-writable": [
                "/lib/python2.7/site-packages/easy-install.pth",
                "/lib/python2.7/site-packages/setuptools.pth",
            ]
        }
        data.append(dep_data)

    set_cache(step_name, data)
    return data

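# e.g. get_python_package("mercurial") returns a one-module list installing the
# latest mercurial sdist from PyPI (see the main script below)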

def file_upload(filename, dest="/app/bin", src=None, replace=None, use_install=False):
    """Generate manifest modules to upload a local file

    @param filename(str): name of the local file to upload
        Note that it will be overwritten if replace is used
    @param dest(str, Path): path where the file must be installed in the container
    @param src(str, None): when replace is used, name of the source file
    @param replace(dict, None): mapping of text to replace
        Use when file must be dynamically adapted to the container, note that src must
        be set to the name of the template file if this is used.
        e.g. {'##COMMAND##': 'cagou'} will replace all '##COMMAND##' in filename
        by 'cagou'
    @param use_install(bool): if True, install file with `install src dest`
        else, use `mv src dest`
    @return (list(dict)): modules to add to the manifest
    """
    assert isinstance(filename, str) and '/' not in filename
    print_step(f"generating {filename} module")
    # no cache for this one, to be sure to always have the latest version
    filename = Path(filename)
    if src is None:
        file_to_test = filename
    else:
        src = Path(src)
        file_to_test = src
    if not file_to_test.exists():
        raise RuntimeError(
            f"{file_to_test} is missing, it is needed to build the manifest!")

    if replace:
        if src is None:
            raise ValueError(
                '"src" must be set to name of template file if replace is used')
        print(f'doing replacement in template "{src}" to dest "{filename}"')
        # there are strings to replace, we read file, do the changes and dump the
        # result in <filename>
        with open(src) as f:
            buff = f.read()

        for old, new in replace.items():
            buff = buff.replace(old, new)

        with open(filename, 'w') as f:
            f.write(buff)
    else:
        if src is not None:
            raise ValueError('"src" must not be used if replace is not used')

    with filename.open('rb') as f:
        hash_ = hashlib.sha256(f.read()).hexdigest()

    dest = Path(dest)

    dep_data = {
        "name": str(filename),
        "buildsystem": "simple",
        "build-commands": [
        ],
        "sources": [
            {
                "type": "file",
                "path": str(filename),
                "sha256": hash_,
            }
        ]
    }

    build_commands = dep_data['build-commands']
    if use_install:
        build_commands.append(f"install -Dv {filename} {dest}")
    else:
        if dest.as_posix() not in ('/app', '/app/bin'):
            # just in case the destination directory doesn't exist
            build_commands.append(f"mkdir -p {dest.parent}")
        build_commands.append(f"mv -v {filename} {dest}")

    return [dep_data]


def file_from_url(url, dest=None, step_name=None, step_message=None):
    """Generate manifest modules for a file either on a http(s) url or local

    @param url(str): url of the file to use, or local path
        if it starts with http, url will be used, else file_upload(url) will be used
    @param dest(str, None): path where the file should be copied
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        return file_upload(url)
    if not parsed.scheme.startswith('http'):
        raise NotImplementedError(
            f'we can\'t use the URL "{url}", the scheme "{parsed.scheme}" is not managed')

    filepath = Path(parsed.path)
    stem = filepath.stem

    if step_name is None:
        # we use this name to easily ignore cache (with stem) while avoiding
        # conflict if we have 2 URLs with the same stem
        step_name = f"{stem}__{url}"

    if step_message is None:
        step_message = f"generating module for {stem}"

    print_step(step_message)
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    r = requests.get(url)
    r.raise_for_status()
    file_hash = hashlib.sha256(r.content).hexdigest()

    dep_data = {"name": stem}

    if dest is not None:
        dest = Path(dest)
        dep_data.update({
            'buildsystem': 'simple',
            'build-commands': [
                f'mkdir -p {dest.parent}',
                f'mv "{filepath.name}" "{dest}"',
            ]
        })

    dep_data['sources'] = [
        {
            "type": "file",
            "url": url,
            "sha256": file_hash,
        }
    ]

    data = [dep_data]
    set_cache(step_name, data)
    return data


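# `pip show <package>` prints metadata lines such as "Name: ...", "Version: ..."
# and "Requires: attrs, idna, ..." (possibly empty); only the "Requires: " line
# is parsed here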
def get_requirements(pip, package_name):
    """Retrieve requirements for an installed python package

    @param pip(Path): path to pip executable to use
        package must be installed in the environment of this pip
    @param package_name(str): name of the package to retrieve
    @return(list[str]): found requirements
    """
    show_cplted = subprocess.run([pip, "show", package_name],
                                 capture_output=True, text=True)
    show_cplted.check_returncode()

    lines = show_cplted.stdout.split('\n')
    requirement_raw = next(l for l in lines if l.startswith(SHOW_REQUIRES_HEADER))
    requirement_raw = requirement_raw[len(SHOW_REQUIRES_HEADER):]
    requirements = [canonical(p) for p in requirement_raw.split(',') if p.strip()]
    requirements_update = [
        canonical(r) for r in PYTHON_DEP_REQUIREMENTS_UPDATE.get(package_name, [])]
    new_requirements = set(requirements_update).difference(requirements)
    if new_requirements:
        print("adding extra requirements to {}: {}".format(
            package_name, ', '.join(new_requirements)))
        requirements.extend(new_requirements)
    return requirements


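# dependencies are ordered with a depth-first post-order traversal: a package
# is appended only after all of its requirements, which yields a valid
# installation order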
def resolve_requirements(package, deps_map, deps, indent=0):
    """Recursively resolve requirements

    @param package(Package): retrieve dependencies of this package
    @param deps_map(dict): map from dependency name to Package instance
    @param deps(list[Package]): ordered dependencies
        this list is updated in place
    @param indent(int): used internally to print the dependencies tree
    """
    if package in deps:
        return
    print(" " * indent + package.name)
    reqs_data = [deps_map[r] for r in package.requirements]

    for data in reqs_data:
        resolve_requirements(data, deps_map, deps, indent+1)

    deps.append(package)


def get_cache_dir():
    """Return path to directory to use for cache"""
    return Path(f"cache_{app_id}")


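# e.g. canonical("Service_Identity") returns "service-identity"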
def canonical(name):
    """Get canonical name of a package"""
    return name.lower().replace('_', '-').strip()


modules = []
cache_path = Path.home() / Path('.cache/sat_flatpak_cache.json')
if not cache_path.exists():
    cache = {}
else:
    with cache_path.open() as f:
        cache = json.load(f)


## steps ##

def get_libxslt():
    """Generate manifest module to install libxslt (needed for lxml)"""
    step_name = 'libxslt'
    print_step("retrieving latest version of libxslt")
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    ftp = FTP("xmlsoft.org")
    ftp.login()
    ftp.cwd("libxml2")
    libxslt_archives = [l for l in ftp.nlst() if 'xslt' in l
                        and l.endswith('tar.gz')
                        and 'git' not in l
                        and 'rc' not in l]
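    # note: the listing is taken as-is, so this assumes the server returns
    # entries in (at least roughly) version order; the last matching archive
    # is used as the latest release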
    latest_libxslt = libxslt_archives[-1]
    print(f"latest libxslt found: {latest_libxslt}")

    with tempfile.TemporaryDirectory() as tmp_dirname:
        tmp_dir = Path(tmp_dirname)
        file_path = tmp_dir / latest_libxslt
        with file_path.open('wb+') as f:
            ftp.retrbinary('RETR ' + latest_libxslt, f.write)
            f.seek(0)
            libxslt_hash = hashlib.sha256(f.read()).hexdigest()

    ftp.quit()

    print(f"libxslt hash: {libxslt_hash}")

    data = [{
        "name": "libxslt",
        "sources": [
            {
                "type": "archive",
                "url": f"ftp://xmlsoft.org/libxml2/{latest_libxslt}",
                "sha256": libxslt_hash,
            }
        ]
    }]
    set_cache(step_name, data)
    return data


def get_python_deps():
    """Generate manifest modules for python dependencies of main package"""
    step_name = f'python_deps__{app_id}'
    print_step("retrieving python dependencies")
    cache = get_cache(step_name)
    if cache is not None:
        return cache

    with tempfile.TemporaryDirectory() as tmp_dirname:
        if args.deps_dir is not None:
            # we ignore the created temporary directory if we have another one specified
            tmp_dirname = args.deps_dir
        tmp_dir = Path(tmp_dirname)
        env_dir = tmp_dir / 'env'
        pip = env_dir / 'bin' / 'pip'
        download_dir = tmp_dir / 'archives'
        if download_dir.exists() and env_dir.exists():
            print("dependencies are already downloaded and packages are already "
                  "installed")
        else:
            download_dir.mkdir()
            print(f"working in temporary directory {tmp_dirname}")
            venv_cplted = subprocess.run(["/usr/bin/env", "virtualenv2", env_dir])
            venv_cplted.check_returncode()
            print("\ndownloading packages")
            command_args = [pip, "download", "-d", download_dir,
                            "--progress-bar", "emoji"]
            if args.no_binary:
                command_args.extend(["--no-binary", ":all:"])
            command_args.append(main_package_source)
            down_cplted = subprocess.run(command_args)
            down_cplted.check_returncode()
            print("\ninstalling package\n")
            # we install the package to have a good map of requirements, pypi
            # metadata are incomplete. Packages should be retrieved from pip cache
            inst_cplted = subprocess.run([pip, "install", main_package_source])
            inst_cplted.check_returncode()

        print_step("analyzing python dependencies")
        deps_map = {}

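        # sdist archives are named <name>-<version>.<extension>,
        # e.g. "Twisted-19.2.1.tar.bz2" gives name "Twisted" and version "19.2.1"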
        for archive_path in download_dir.iterdir():
            name, right_part = archive_path.name.rsplit('-', 1)
            name_canonical = canonical(name)
            if right_part.endswith('.tar.gz'):
                version = right_part[:-7]
            elif right_part.endswith('.tar.bz2'):
                version = right_part[:-8]
            elif right_part.endswith('.zip'):
                version = right_part[:-4]
            else:
                raise ValueError(
                    f"Archives are supposed to be .tar.gz, .tar.bz2 or .zip archives, "
                    f"but file found is {archive_path.name}, did something change on "
                    f"PyPI?")
            with open(archive_path, "rb") as f:
                dep_hash = hashlib.sha256(f.read()).hexdigest()
            r = requests.get(f"https://pypi.org/pypi/{name}/{version}/json")
            r.raise_for_status()
            dep_json = r.json()
            release_json = dep_json["releases"][version]
            try:
                version_json = next(v for v in release_json
                                    if v['digests']['sha256'] == dep_hash)
            except StopIteration:
                raise ValueError(f"Can't find the version we downloaded for {name}")

            requirements = get_requirements(pip, name_canonical)

            dep = Package(name=name_canonical,
                          version=version,
                          hash_=dep_hash,
                          url=version_json['url'],
                          requirements=requirements,
                          # extra_requirements=extra_requirements,
                          )

            deps_map[name_canonical] = dep
            print(f"found dependency: {dep.name} {dep.version}")

        print_step("ordering dependencies")

        requirements = get_requirements(pip, main_package)
        main_pack = Package(name=main_package,
                            version=args.version,
                            hash_="",
                            url="",
                            requirements=requirements,
                            )

        deps = []
        print("resolving requirements\n")
        resolve_requirements(main_pack, deps_map, deps)
        missing_deps = set(deps_map.values()).difference(deps)
        if missing_deps:
            print("\n/!\\ some dependencies are missing, this should not happen! "
                  "Adding them\n")
            print("additional requirements")
            for pack in missing_deps:
                resolve_requirements(pack, deps_map, deps)

        # we remove deps already installed with PYTHON_SETUP_REQUIREMENTS
        for name in PYTHON_SETUP_REQUIREMENTS:
            package_data = deps_map.get(name)
            if package_data is not None:
                deps.remove(package_data)
                print(f"removed {name} which is already a setup requirement")

        # main package is installed at the end
        deps.remove(main_pack)

        print("\npackages are now ordered: {}".format(", ".join(d.name for d in deps)))

        print("\nwe now generate modules for python dependencies")
        data = []
        version_force = {
            canonical(n): v
            for n, v in build_settings.get('python_deps_version_force', {}).items()
        }

        for dep in deps:
            version_forced = version_force.get(dep.name)
            if version_forced == 'pre-release':
                print(f"using pre-release version for {dep.name} as requested in build "
                      "settings")
                dep_data = get_python_package(dep.name, step_message='', with_pre=True)[0]
            elif version_forced is not None:
                print(f"using version {version_forced} for {dep.name} as requested in "
                      "build settings")
                dep_data = get_python_package(dep.name, step_message='',
                                              version=version_forced)[0]
            else:
                dep_data = {
                    "name": dep.name,
                    "buildsystem": "simple",
                    "build-commands": [
                        "python2 setup.py install --prefix=${FLATPAK_DEST}"
                    ],
                    "sources": [
                        {
                            "type": "archive",
                            "url": dep.url,
                            "sha256": dep.hash_,
                        }
                    ],
                    "ensure-writable": [
                        "/lib/python2.7/site-packages/easy-install.pth",
                        "/lib/python2.7/site-packages/setuptools.pth",
                    ]
                }

            if dep.name in PYTHON_DEP_OVERRIDE:
                print(f"applying override for {dep.name}")
                override = PYTHON_DEP_OVERRIDE[dep.name]

                # keys suffixed with "_extend" won't replace the data
                for key in list(override.keys()):
                    if key.endswith('_extend'):
                        real_key = key[:-7]
                        extend_data = override.pop(key)
                        if real_key == 'sources':
                            for item_data in extend_data:
                                # we look for missing hashes and add them
                                if (item_data.get('type') in ('file', 'patch')
                                        and 'sha256' not in item_data):
                                    with open(item_data['path'], 'rb') as f:
                                        hash_ = hashlib.sha256(f.read()).hexdigest()
                                    item_data['sha256'] = hash_

                        dep_data.setdefault(real_key, []).extend(extend_data)

                dep_data.update(override)

            data.append(dep_data)

    set_cache(step_name, data)
    return data


def cache_from_repos():
    """Get a local cache of a repository and return the cache path"""
    print_step("retrieving code from repository")
    dev_repos = build_settings['dev_repos']
    repos_type = dev_repos.get('type', 'hg')
    if repos_type != 'hg':
        raise NotImplementedError("only Mercurial is managed so far")
    url = dev_repos['url']
    dest_path = get_cache_dir() / main_package
    dest_path.mkdir(parents=True, exist_ok=True)
    if (dest_path / '.hg').is_dir():
        print("code is already in cache")
    else:
        try:
            print(f"retrieving code from repository {url} into {dest_path}")
            hg_cplted = subprocess.run(["hg", "clone", url, dest_path])
            hg_cplted.check_returncode()
        except Exception as e:
            shutil.rmtree(dest_path)
            raise e
    return dest_path


def get_repos_module():
    """Generate manifest module for the repository"""
    dep_data = {
        "name": main_package,
        "buildsystem": "simple",
        "build-commands": [
            "python2 setup.py install --prefix=${FLATPAK_DEST}"
        ],
        "sources": [
            {
                "type": "dir",
                "path": str(main_package_source)
            }
        ],
        "ensure-writable": [
            "/lib/python2.7/site-packages/easy-install.pth",
            "/lib/python2.7/site-packages/setuptools.pth",
        ]
    }
    return [dep_data]


def get_sat_media():
    """Generate module for the latest version of sat_media available on the FTP"""
    step_name = 'sat_media'
    print_step("retrieving latest version of sat_media")
    cache = get_cache(step_name)
    if cache is not None:
        return cache
    url = "https://ftp.goffi.org/sat_media/sat_media.tar.bz2"
    r = requests.get(url)
    r.raise_for_status()
    hash_ = hashlib.sha256(r.content).hexdigest()
    dep_data = {
        "name": "sat-media",
        "buildsystem": "simple",
        "build-commands": [
            "cp -vr . ${FLATPAK_DEST}/share/sat-media"
        ],
        "sources": [
            {
                "type": "archive",
                "url": url,
                "sha256": hash_,
            }
        ],
    }

    data = [dep_data]
    set_cache(step_name, data)
    return data

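# build settings may specify the icon either as a plain URL string or as a
# mapping, e.g. {"url": "https://example.org/cagou.png", "size": "128x128"}
# (hypothetical values; "size" may be omitted for SVG, where "scalable" is used)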
def get_icon():
    icon = build_settings.get('icon')
    if icon is None:
        # no icon requested, nothing to do
        return []
    if isinstance(icon, str):
        icon = {'url': icon}
    icon_path = Path(urlparse(icon['url']).path)
    suffix = icon_path.suffix[1:]
    if suffix not in ('svg', 'png'):
        raise ValueError("invalid icon, you must use a SVG or PNG image!")
    if 'size' not in icon:
        if suffix == 'svg':
            icon['size'] = 'scalable'
        else:
            raise ValueError('icon size is not specified, please add a "size" key')

    dest_path = f"/app/share/icons/hicolor/{icon['size']}/apps/{app_id}.{suffix}"

    return file_from_url(
        url=icon['url'],
        dest=dest_path,
        # the cache is shared if several manifests use the same icon URL
        step_name=f"icon__{icon}",
        step_message="retrieving application icon",
    )


def get_app_metadata():
    desktop_file = build_settings.get('desktop_file')
    appdata_file = build_settings.get('appdata_file')
    if desktop_file is None and appdata_file is None:
        return []

    print_step("retrieving application metadata")
    # we don't use cache here, to be sure to always have up-to-date files

    data = []

    if desktop_file is not None:
        print("generating module for desktop metadata")
        data.extend(file_upload(
            filename=desktop_file,
            dest=f"/app/share/applications/{app_id}.desktop",
        ))

    if appdata_file is not None:
        print("generating module for appdata metadata")
        data.extend(file_upload(
            filename=appdata_file,
            dest=f"/app/share/metainfo/{app_id}.appdata.xml",
        ))

    return data


## main script ##


if __name__ == '__main__':

    args = parse_args()
    title = args.name.title()
    app_id = f"org.salutatoi.{title}"
    package_file = Path(f"{app_id}.json")

    print(f"generating manifest for {app_id} ({args.version})")

    if package_file.exists() and not args.force:
        confirm = input(
            f"{package_file} already exists, do you want to overwrite it (y/N)? ")
        if confirm != 'y':
            print("manifest building cancelled")
            sys.exit(0)

    tpl_file = Path(f"_tpl_{package_file}")

    if not tpl_file.exists():
        raise RuntimeError(f"Can't find template {tpl_file}, it is mandatory to build "
                           "the manifest!")

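    # the template's "_build_settings" key may contain (as read below and in the
    # steps above): "package", "setup_requirements", "dev_repos" ({"type": "hg",
    # "url": ...}), "python_deps_version_force", "icon", "desktop_file",
    # "appdata_file" and "wrapped_command"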
    with tpl_file.open() as f:
        template = json.load(f)

    build_settings = template.pop(SETTINGS_KEY, {})
    if "setup_requirements" in build_settings:
        PYTHON_SETUP_REQUIREMENTS.extend(build_settings["setup_requirements"])
    main_package = canonical(build_settings.get('package', args.name))
    if args.version == 'dev' and 'dev_repos' in build_settings:
        main_package_source = cache_from_repos()
    else:
        main_package_source = main_package

    manifest = {}
    manifest['app-id'] = app_id
    manifest['default-branch'] = args.version
    # we update DEFAULT_MANIFEST only now to have "app-id" and "default-branch" at the
    # top of the manifest
    manifest.update(DEFAULT_MANIFEST)
    manifest.update(template)

    modules.extend(get_libxslt())

    # setup requirements
    modules.extend(get_python_package(
        PYTHON_SETUP_REQUIREMENTS,
        step_name="setup_requirements",
        step_message="generating modules for setup requirements")
    )

    # python dependencies
    modules.extend(get_python_deps())

    # at this point we add things specified in the template
    modules.extend(manifest.get('modules', []))
    manifest['modules'] = modules

    # SàT common things
    existing_modules = {d['name'] for d in modules}
    if "sat_templates" not in existing_modules:
        modules.extend(get_python_package("sat_templates", with_pre=True))
    modules.extend(get_sat_media())
    modules.extend(file_upload('sat.conf', '/app'))

    # wrapper to launch the backend if needed
    wrapped_command = build_settings.get('wrapped_command')
    if wrapped_command:
        modules.extend(file_upload('sat_wrapper', src='sat_wrapper.py',
                                   replace={'##COMMAND##': wrapped_command},
                                   use_install=True))

    # icon
    modules.extend(get_icon())

    # desktop file and appdata file
    modules.extend(get_app_metadata())

    # now the app itself
    if args.version == 'dev' and 'dev_repos' in build_settings:
        # mercurial is needed for the dev version, to install it but also to
        # retrieve the revision used
        modules.extend(get_python_package("mercurial"))
        modules.extend(get_repos_module())
    else:
        modules.extend(get_python_package(main_package, version=args.version))

    print_step("writing manifest")
    with package_file.open('w') as f:
        json.dump(manifest, f, indent=4)

    if args.ignore_cache:
        print("/!\\ those --ignore-cache arguments don't correspond to any step: {}"
              .format(', '.join(args.ignore_cache)))

    print(f"manifest generated successfully at {package_file}")

    if args.export is not None:
        print_step("exporting build files")
        print(f"exporting to {args.export}\n")
        to_export = [
            ("package file", package_file),
        ]

        for m in modules:
            for s in m.get('sources', []):
                s_type = s.get('type')
                if s_type in ('path', 'file'):
                    try:
                        path = s['path']
                    except KeyError:
                        if 'url' in s:
                            continue
                        else:
                            raise ValueError(f"path missing for module source:\n{s}")
                    to_export.append((s_type, Path(path)))

        for label, path in to_export:
            print(f"exporting {label}: {path}")
            dest = args.export / path
            if dest.exists():
                if args.force_export:
                    print(f" replacing existing {dest}")
                    if path.is_dir():
                        shutil.rmtree(dest)
                    else:
                        dest.unlink()
                else:
                    print(" it already exists, skipping")
                    continue
            if args.symlink == 'all':
                os.symlink(path, dest)
            else:
                if path.is_dir():
                    shutil.copytree(path, dest)
                else:
                    shutil.copyfile(path, dest)

        if args.version == 'dev':
            print("exporting cache")
            dest = args.export / get_cache_dir()
            if args.force_export and os.path.lexists(dest):
                print(f" replacing existing {dest}")
                if dest.is_symlink():
                    dest.unlink()
                else:
                    shutil.rmtree(dest)

            if dest.exists():
                print(" it already exists, skipping")
            else:
                if args.symlink in ('all', 'cache'):
                    os.symlink(get_cache_dir().resolve(), dest)
                else:
                    shutil.copytree(get_cache_dir(), dest)

        print("\nexport completed")