build_tools.py 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961
  1. #!/usr/bin/env python
  2. from json import JSONDecodeError
  3. import math
  4. import pathlib
  5. import time
  6. import traceback
  7. from typing import Callable, Dict
  8. import pkg_resources
  9. import sys
  10. import os
  11. import io
  12. from os import walk
  13. from requests import Response
  14. NEWLINE_CHAR = '\n'
  15. def print_message(message,prefix=''):
  16. trimmed=re.sub(r'\n', r'%0A', message,flags=re.MULTILINE)
  17. print(f'{prefix}{trimmed}')
  18. def print_debug(message):
  19. print_message(message,'::debug::')
  20. def print_error(message):
  21. print_message(message,'::error::')
# All remaining dependencies are imported inside a guarded block: if any
# module is missing, dump the interpreter path, the installed packages and
# the environment to make CI failures diagnosable, then re-raise.
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import logging
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    print_error(
        f'Failed importing module {ex.name}, using interpreter {sys.executable}. {NEWLINE_CHAR} Installed packages:')
    # NOTE(review): pkg_resources is deprecated in recent setuptools; kept
    # as-is since it is only used on this failure path.
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(
        ["%s==%s" % (i.key, i.version) for i in installed_packages])
    print(NEWLINE_CHAR.join(installed_packages_list))
    print(f'Environment: ')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise
  63. tool_version = "1.0.7"
  64. WEB_INSTALLER_DEFAULT_PATH = './web_installer/'
  65. FORMAT = '%(asctime)s %(message)s'
  66. logging.basicConfig(format=FORMAT)
  67. github_env = type('', (), {})()
  68. manifest = {
  69. "name": "",
  70. "version": "",
  71. "home_assistant_domain": "slim_player",
  72. "funding_url": "https://esphome.io/guides/supporters.html",
  73. "builds": [
  74. {
  75. "chipFamily": "ESP32",
  76. "parts": [
  77. ]
  78. }
  79. ]
  80. }
  81. artifacts_formats_outdir = '$OUTDIR'
  82. artifacts_formats_prefix = '$PREFIX'
  83. artifacts_formats = [
  84. ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
  85. ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
  86. ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
  87. ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
  88. ['build/partition_table/partition-table.bin ',
  89. '$OUTDIR/$PREFIX-partition-table.bin'],
  90. ]
  91. class AttributeDict(dict):
  92. __slots__ = ()
  93. def __getattr__(self, name: str):
  94. try:
  95. return self[name.upper()]
  96. except Exception:
  97. try:
  98. return self[name.lower()]
  99. except Exception:
  100. for attr in self.keys():
  101. if name.lower() == attr.replace("'", "").lower():
  102. return self[attr]
  103. __setattr__ = dict.__setitem__
# Command-line interface: one subcommand per build-pipeline step.
parser = argparse.ArgumentParser(
    description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str,
                    help='Working directory', default=os.getcwd())
parser.add_argument('--loglevel', type=str, choices={
    'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'}, help='Logging level', default='INFO')
subparsers = parser.add_subparsers(dest='command', required=True)
# 'list_files': dump the content of the working folder.
parser_dir = subparsers.add_parser("list_files",
                                   add_help=False,
                                   description="List Files parser",
                                   help="Display the content of the folder")
# 'manifest': generate the web-installer manifest files.
parser_manifest = subparsers.add_parser("manifest",
                                        add_help=False,
                                        description="Manifest parser",
                                        help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str,
                             help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument(
    '--max_count', type=int, help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_manifest.add_argument(
    '--outdir', required=True, type=str, help='Output directory for files and manifests')
# 'pushinstaller': push generated artifacts to the web-installer repository.
parser_pushinstaller = subparsers.add_parser("pushinstaller",
                                             add_help=False,
                                             description="Web Installer Checkout parser",
                                             help="Handles the creation of artifacts files")
parser_pushinstaller.add_argument(
    '--target', type=str, help='Output directory for web installer repository', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--artifacts', type=str, help='Target subdirectory for web installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--source', type=str, help='Source directory for the installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument('--url', type=str, help='Web Installer clone url ',
                                  default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument(
    '--web_installer_branch', type=str, help='Web Installer branch to use ', default='main')
parser_pushinstaller.add_argument(
    '--token', type=str, help='Auth token for pushing changes')
parser_pushinstaller.add_argument(
    '--flash_file', type=str, help='Manifest json file path')
parser_pushinstaller.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
# 'environment': write build metadata to the GitHub env file.
parser_environment = subparsers.add_parser("environment",
                                           add_help=False,
                                           description="Environment parser",
                                           help="Updates the build environment")
parser_environment.add_argument(
    '--env_file', type=str, help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument(
    '--build', required=True, type=int, help='The build number')
parser_environment.add_argument(
    '--node', required=True, type=str, help='The matrix node being built')
parser_environment.add_argument(
    '--depth', required=True, type=int, help='The bit depth being built')
parser_environment.add_argument(
    '--major', type=str, help='Major version', default='2')
parser_environment.add_argument(
    '--docker', type=str, help='Docker image to use', default='sle118/squeezelite-esp32-idfv43')
# 'show': print the current build environment.
parser_show = subparsers.add_parser("show",
                                    add_help=False,
                                    description="Show parser",
                                    help="Show the build environment")
# 'build_flags': derive mock/release/ui-build flags from the last commit message.
parser_build_flags = subparsers.add_parser("build_flags",
                                           add_help=False,
                                           description="Build Flags",
                                           help="Updates the build environment with build flags")
parser_build_flags.add_argument(
    '--mock', action='store_true', help='Mock release')
parser_build_flags.add_argument(
    '--force', action='store_true', help='Force a release build')
parser_build_flags.add_argument(
    '--ui_build', action='store_true', help='Include building the web UI')
def format_commit(commit):
    """Format a pygit2 commit as 'short_id message (date) <author>'.

    Example output shape:
    463a9d8b7 Merge branch 'bugfix/...' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    """
    # Rebuild the author's local timezone from the minute offset git stores.
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(
        timedelta(minutes=commit.author.offset)))
    timestr = dt.strftime('%c%z')
    # Collapse the multi-line commit message onto one line.
    cmesg = commit.message.replace('\n', ' ')
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace(' ', ' ', )
  184. def get_github_data(repo: Repository, api):
  185. base_url = urlparse(repo.remotes['origin'].url)
  186. print(
  187. f'Base URL is {base_url.path} from remote URL {repo.remotes["origin"].url}')
  188. url_parts = base_url.path.split('.')
  189. for p in url_parts:
  190. print(f'URL Part: {p}')
  191. api_url = f"{url_parts[0]}/{api}"
  192. print(f'API to call: {api_url}')
  193. url = f"https://api.github.com/repos{api_url}"
  194. resp = requests.get(
  195. url, headers={"Content-Type": "application/vnd.github.v3+json"})
  196. return json.loads(resp.text)
  197. def dump_directory(dir_path):
  198. # list to store files name
  199. res = []
  200. for (dir_path, dir_names, file_names) in walk(dir_path):
  201. res.extend(file_names)
  202. print(res)
  203. class ReleaseDetails():
  204. version: str
  205. idf: str
  206. platform: str
  207. branch: str
  208. bitrate: str
  209. def __init__(self, tag: str) -> None:
  210. self.version, self.idf, self.platform, self.branch = tag.split('#')
  211. try:
  212. self.version, self.bitrate = self.version.split('-')
  213. except Exception:
  214. pass
  215. def get_attributes(self):
  216. return {
  217. 'version': self.version,
  218. 'idf': self.idf,
  219. 'platform': self.platform,
  220. 'branch': self.branch,
  221. 'bitrate': self.bitrate
  222. }
  223. def format_prefix(self) -> str:
  224. return f'{self.branch}-{self.platform}-{self.version}'
  225. def get_full_platform(self):
  226. return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"
class BinFile():
    """A single firmware binary: its flash offset plus naming/copy metadata."""
    # base file name of the binary (e.g. 'squeezelite.bin')
    name: str
    # flash offset string taken from the flasher args file
    offset: int
    # absolute-ish path of the source binary inside the extracted artifacts
    source_full_path: str
    # renamed file: '<branch>-<platform>-<version>-<name>'
    target_name: str
    # set by copy(): final location of the copied binary
    target_fullpath: str
    # path of the binary relative to the build directory
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        # rstrip() everywhere: some entries in artifacts_formats carry
        # trailing whitespace.
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(
            file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(
            source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{self.name}'.rstrip()

    def get_manifest(self):
        """Manifest entry for this file: target path plus flash offset."""
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        """Copy the source binary into *target_folder* under its target name.

        Returns the full target path; re-raises copy failures after dumping
        the source directory content for diagnostics.
        """
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        print_debug(
            f'File {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path,
                            self.target_fullpath, follow_symlinks=True)
        except Exception as ex:
            print_error(f"Error while copying {self.source_full_path} to {self.target_fullpath}{NEWLINE_CHAR}Content of {os.path.dirname(self.source_full_path.rstrip())}:{NEWLINE_CHAR}{NEWLINE_CHAR.join(get_file_list(os.path.dirname(self.source_full_path.rstrip())))}")
            raise
        return self.target_fullpath

    def get_attributes(self):
        """Serializable summary of this file for the main manifest."""
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }
  263. class PlatformRelease():
  264. name: str
  265. description: str
  266. url: str = ''
  267. zipfile: str = ''
  268. tempfolder: str
  269. release_details: ReleaseDetails
  270. flash_parms = {}
  271. build_dir: str
  272. has_artifacts: bool
  273. branch: str
  274. assets: list
  275. bin_files: list
  276. name_prefix: str
  277. flash_file_path: str
  278. def get_manifest_name(self) -> str:
  279. return f'{self.name_prefix}-{self.release_details.format_prefix()}.json'
  280. def __init__(self, flash_file_path, git_release, build_dir, branch, name_prefix) -> None:
  281. self.name = git_release.tag_name
  282. self.description = git_release.body
  283. self.assets = git_release['assets']
  284. self.has_artifacts = False
  285. self.name_prefix = name_prefix
  286. if len(self.assets) > 0:
  287. if self.has_asset_type():
  288. self.url = self.get_asset_from_extension().browser_download_url
  289. if self.has_asset_type('.zip'):
  290. self.zipfile = self.get_asset_from_extension(
  291. ext='.zip').browser_download_url
  292. self.has_artifacts = True
  293. self.release_details = ReleaseDetails(git_release.name)
  294. self.bin_files = list()
  295. self.flash_file_path = flash_file_path
  296. self.build_dir = os.path.relpath(build_dir)
  297. self.branch = branch
  298. def process_files(self, outdir: str) -> list:
  299. parts = []
  300. for f in self.bin_files:
  301. f.copy(outdir)
  302. parts.append(f.get_manifest())
  303. return parts
  304. def get_asset_from_extension(self, ext='.bin'):
  305. for a in self.assets:
  306. filename = AttributeDict(a).name
  307. file_name, file_extension = os.path.splitext(filename)
  308. if file_extension == ext:
  309. return AttributeDict(a)
  310. return None
  311. def has_asset_type(self, ext='.bin') -> bool:
  312. return self.get_asset_from_extension(ext) is not None
  313. def platform(self):
  314. return self.release_details.get_full_platform()
  315. def get_zip_file(self):
  316. self.tempfolder = extract_files_from_archive(self.zipfile)
  317. print(
  318. f'Artifacts for {self.name} extracted to {self.tempfolder}')
  319. flash_parms_file = os.path.relpath(
  320. self.tempfolder+self.flash_file_path)
  321. line: str
  322. with open(flash_parms_file) as fin:
  323. for line in fin:
  324. components = line.split()
  325. if len(components) == 2:
  326. self.flash_parms[os.path.basename(
  327. components[1]).rstrip().lstrip()] = components[0]
  328. try:
  329. for artifact in artifacts_formats:
  330. base_name = os.path.basename(artifact[0]).rstrip().lstrip()
  331. self.bin_files.append(BinFile(
  332. self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
  333. has_artifacts = True
  334. except Exception:
  335. self.has_artifacts = False
  336. def cleanup(self):
  337. print(
  338. f'removing temp directory for platform release {self.name}')
  339. shutil.rmtree(self.tempfolder)
  340. def get_attributes(self):
  341. return {
  342. 'name': self.name,
  343. 'branch': self.branch,
  344. 'description': self.description,
  345. 'url': self.url,
  346. 'zipfile': self.zipfile,
  347. 'release_details': self.release_details.get_attributes(),
  348. 'bin_files': [b.get_attributes() for b in self.bin_files],
  349. 'manifest_name': self.get_manifest_name()
  350. }
  351. class Releases():
  352. _dict: dict = collections.OrderedDict()
  353. maxcount: int = 0
  354. branch: str = ''
  355. repo: Repository = None
  356. last_commit: Commit = None
  357. manifest_name: str
  358. def __init__(self, branch: str, maxcount: int = 3) -> None:
  359. self.maxcount = maxcount
  360. self.branch = branch
  361. def count(self, value: PlatformRelease) -> int:
  362. content = self._dict.get(value.platform())
  363. if content == None:
  364. return 0
  365. return len(content)
  366. def get_platform(self, platform: str) -> list:
  367. return self._dict[platform]
  368. def get_platform_keys(self):
  369. return self._dict.keys()
  370. def get_all(self) -> list:
  371. result: list = []
  372. for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
  373. for release in platform:
  374. result.append(release)
  375. return result
  376. def append(self, value: PlatformRelease):
  377. # optional processing here
  378. if self.count(value) == 0:
  379. self._dict[value.platform()] = []
  380. if self.should_add(value):
  381. print(f'Adding release {value.name} to the list')
  382. self._dict[value.platform()].append(value)
  383. else:
  384. print(f'Skipping release {value.name}')
  385. def get_attributes(self):
  386. res = []
  387. release: PlatformRelease
  388. for release in self.get_all():
  389. res.append(release.get_attributes())
  390. return res
  391. def get_minlen(self) -> int:
  392. return min([len(self.get_platform(p)) for p in self.get_platform_keys()])
  393. def got_all_packages(self) -> bool:
  394. return self.get_minlen() >= self.maxcount
  395. def should_add(self, release: PlatformRelease) -> bool:
  396. return self.count(release) <= self.maxcount
  397. def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
  398. if self.branch != package.branch:
  399. print(
  400. f'Skipping release {package.name} from branch {package.branch}')
  401. elif package.has_artifacts or not with_artifacts:
  402. self.append(package)
  403. @classmethod
  404. def get_last_commit_message(cls, repo_obj: Repository = None) -> str:
  405. last: Commit = cls.get_last_commit(repo_obj)
  406. if last is None:
  407. return ''
  408. else:
  409. return last.message.replace(NEWLINE_CHAR, ' ')
  410. @classmethod
  411. def get_last_author(cls, repo_obj: Repository = None) -> Signature:
  412. last: Commit = cls.get_last_commit(repo_obj)
  413. return last.author
  414. @classmethod
  415. def get_last_committer(cls, repo_obj: Repository = None) -> Signature:
  416. last: Commit = cls.get_last_commit(repo_obj)
  417. return last.committer
  418. @classmethod
  419. def get_last_commit(cls, repo_obj: Repository = None) -> Commit:
  420. loc_repo = repo_obj
  421. if cls.repo is None:
  422. cls.load_repository(os.getcwd())
  423. if loc_repo is None:
  424. loc_repo = cls.repo
  425. head: Reference = loc_repo.head
  426. target = head.target
  427. ref: Reference
  428. if cls.last_commit is None:
  429. try:
  430. cls.last_commit = loc_repo[target]
  431. print(
  432. f'Last commit for {head.shorthand} is {format_commit(cls.last_commit)}')
  433. except Exception as e:
  434. print_error(
  435. f'Unable to retrieve last commit for {head.shorthand}/{target}: {e}')
  436. cls.last_commit = None
  437. return cls.last_commit
  438. @classmethod
  439. def load_repository(cls, path: str = os.getcwd()) -> Repository:
  440. if cls.repo is None:
  441. try:
  442. print(f'Opening repository from {path}')
  443. cls.repo = Repository(path=path)
  444. except GitError as ex:
  445. print_error(f"Unable to access the repository.\nContent of {path}:\n{NEWLINE_CHAR.join(get_file_list(path, 1))}")
  446. raise
  447. return cls.repo
  448. @classmethod
  449. def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
  450. commit: Commit
  451. reference: Reference
  452. commit, reference = repo.resolve_refish(commit_id)
  453. return commit
  454. @classmethod
  455. def get_branch_name(cls) -> str:
  456. return re.sub('[^a-zA-Z0-9\-~!@_\.]', '', cls.load_repository().head.shorthand)
  457. @classmethod
  458. def get_release_branch(cls, repo: Repository, platform_release) -> str:
  459. match = [t for t in repo.branches.with_commit(
  460. platform_release.target_commitish)]
  461. no_origin = [t for t in match if 'origin' not in t]
  462. if len(no_origin) == 0 and len(match) > 0:
  463. return match[0].split('/')[1]
  464. elif len(no_origin) > 0:
  465. return no_origin[0]
  466. return ''
  467. @classmethod
  468. def get_flash_parms(cls, file_path):
  469. flash = parse_json(file_path)
  470. od: collections.OrderedDict = collections.OrderedDict()
  471. for z in flash['flash_files'].items():
  472. base_name: str = os.path.basename(z[1])
  473. od[base_name.rstrip().lstrip()] = literal_eval(z[0])
  474. return collections.OrderedDict(sorted(od.items()))
  475. @classmethod
  476. def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
  477. repo = Releases.load_repository(os.getcwd())
  478. packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
  479. build_dir = os.path.dirname(flash_file_path)
  480. for page in range(1, 999):
  481. print_debug(f'Getting releases page {page}')
  482. releases = get_github_data(
  483. repo, f'releases?per_page=50&page={page}')
  484. if len(releases) == 0:
  485. print_debug(f'No more release found for page {page}')
  486. break
  487. for release_entry in [AttributeDict(platform) for platform in releases]:
  488. packages.add_package(PlatformRelease(flash_file_path, release_entry, build_dir,
  489. Releases.get_release_branch(repo, release_entry), name_prefix))
  490. if packages.got_all_packages():
  491. break
  492. if packages.got_all_packages():
  493. break
  494. return packages
  495. @classmethod
  496. def get_commit_list(cls) -> list:
  497. commit_list = []
  498. last: Commit = Releases.get_last_commit()
  499. if last is None:
  500. return commit_list
  501. try:
  502. for c in Releases.load_repository().walk(last.id, pygit2.GIT_SORT_TIME):
  503. if '[skip actions]' not in c.message:
  504. commit_list.append(format_commit(c))
  505. if len(commit_list) > 10:
  506. break
  507. except Exception as e:
  508. print_error(
  509. f'Unable to get commit list starting at {last.id}: {e}')
  510. return commit_list
  511. @classmethod
  512. def get_commit_list_descriptions(cls) -> str:
  513. return '<<~EOD\n### Revision Log\n'+NEWLINE_CHAR.join(cls.get_commit_list())+'\n~EOD'
  514. def update(self, *args, **kwargs):
  515. if args:
  516. if len(args) > 1:
  517. raise TypeError("update expected at most 1 arguments, "
  518. "got %d" % len(args))
  519. other = dict(args[0])
  520. for key in other:
  521. self[key] = other[key]
  522. for key in kwargs:
  523. self[key] = kwargs[key]
  524. def setdefault(self, key, value=None):
  525. if key not in self:
  526. self[key] = value
  527. return self[key]
  528. def set_workdir(args):
  529. print(f'setting work dir to: {args.cwd}')
  530. os.chdir(os.path.abspath(args.cwd))
  531. def parse_json(filename: str):
  532. fname = os.path.abspath(filename)
  533. folder: str = os.path.abspath(os.path.dirname(filename))
  534. print(f'Opening json file {fname} from {folder}')
  535. try:
  536. with open(fname) as f:
  537. content = f.read()
  538. print_debug(f'Loading json\n{content}')
  539. return json.loads(content)
  540. except JSONDecodeError as ex:
  541. print_error(f'Error parsing {content}')
  542. except Exception as ex:
  543. print_error(
  544. f"Unable to parse flasher args json file. Content of {folder}:{NEWLINE_CHAR.join(get_file_list(folder))}")
  545. raise
  546. def write_github_env(args):
  547. print(f'Writing environment details to {args.env_file}...')
  548. with open(args.env_file, "w") as env_file:
  549. for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
  550. line = f'{attr}{"=" if attr != "description" else ""}{getattr(github_env,attr)}'
  551. print(line)
  552. env_file.write(f'{line}\n')
  553. os.environ[attr] = str(getattr(github_env, attr))
  554. print(f'Done writing environment details to {args.env_file}!')
  555. def set_workflow_output(args):
  556. print(f'Outputting job variables ...')
  557. for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
  558. print(f'::set-output name={attr}::{getattr(github_env,attr)}')
  559. os.environ[attr] = str(getattr(github_env, attr))
  560. print(f'Done outputting job variables!')
  561. def format_artifact_from_manifest(manif_json: AttributeDict):
  562. if len(manif_json) == 0:
  563. return 'Newest release'
  564. first = manif_json[0]
  565. return f'{first["branch"]}-{first["release_details"]["version"]}'
  566. def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
  567. return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'
def handle_build_flags(args):
    """Derive the global build flags (mock/release/ui_build) from the last commit."""
    set_workdir(args)
    print('Setting global build flags')
    commit_message: str = Releases.get_last_commit_message()
    github_env.mock = 1 if args.mock else 0
    # A release build is forced by --mock/--force or by the word "release"
    # anywhere in the last commit message.
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    # The web UI is rebuilt for mocks, on request, on a '[ui-build]' commit
    # marker, or automatically for release builds.
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag == 1 else 0
    # NOTE(review): set_workflow_output ignores its parameter and reads the
    # global github_env directly, so passing github_env here is harmless.
    set_workflow_output(github_env)
def handle_environment(args):
    """Collect commit/build metadata into github_env and write it to the env file."""
    set_workdir(args)
    print('Setting environment variables...')
    commit_message: str = Releases.get_last_commit_message()
    last: Commit = Releases.get_last_commit()
    if last is not None:
        # Propagate authorship of the last commit to the workflow environment.
        github_env.author_name = last.author.name
        github_env.author_email = last.author.email
        github_env.committer_name = last.committer.name
        github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = Releases.get_branch_name()
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    # Release name format: <major>.<build>-<depth>#v4.3#<node>#<branch>
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name(
        'squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{ args.build }'
    github_env.description = Releases.get_commit_list_descriptions()
    write_github_env(args)
def handle_artifacts(args):
    """Copy the build artifacts to the output folder, renaming them per convention."""
    set_workdir(args)
    print(f'Handling artifacts')
    for attr in artifacts_formats:
        # attr[0] is the source path in the build tree; attr[1] is the
        # '$OUTDIR/$PREFIX-...' target template.
        target: str = os.path.relpath(attr[1].replace(artifacts_formats_outdir, args.outdir).replace(
            artifacts_formats_prefix, format_artifact_name()))
        source: str = os.path.relpath(attr[0])
        target_dir: str = os.path.dirname(target)
        print(f'Copying file {source} to {target}')
        try:
            os.makedirs(target_dir, exist_ok=True)
            shutil.copyfile(source, target, follow_symlinks=True)
        except Exception as ex:
            # Dump the source directory content to ease CI debugging.
            print_error(f"Error while copying {source} to {target}\nContent of {target_dir}:\n{NEWLINE_CHAR.join(get_file_list(os.path.dirname(attr[0].rstrip())))}")
            raise
  622. def delete_folder(path):
  623. '''Remov Read Only Files'''
  624. for root, dirs, files in os.walk(path, topdown=True):
  625. for dir in dirs:
  626. fulldirpath = os.path.join(root, dir)
  627. print_debug(f'Drilling down in {fulldirpath}')
  628. delete_folder(fulldirpath)
  629. for fname in files:
  630. full_path = os.path.join(root, fname)
  631. print_debug(f'Setting file read/write {full_path}')
  632. os.chmod(full_path, stat.S_IWRITE)
  633. print_debug(f'Deleting file {full_path}')
  634. os.remove(full_path)
  635. if os.path.exists(path):
  636. print_debug(f'Changing folder read/write {path}')
  637. os.chmod(path, stat.S_IWRITE)
  638. print(f'WARNING: Deleting Folder {path}')
  639. os.rmdir(path)
  640. def get_file_stats(path):
  641. fstat: os.stat_result = pathlib.Path(path).stat()
  642. # Convert file size to MB, KB or Bytes
  643. mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
  644. if (fstat.st_size > 1024 * 1024):
  645. return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
  646. elif (fstat.st_size > 1024):
  647. return math.ceil(fstat.st_size / 1024), "KB", mtime
  648. return fstat.st_size, "B", mtime
  649. def get_file_list(root_path, max_levels: int = 2) -> list:
  650. outlist: list = []
  651. for root, dirs, files in os.walk(root_path):
  652. path = os.path.relpath(root).split(os.sep)
  653. if len(path) <= max_levels:
  654. outlist.append(f'\n{root}')
  655. for file in files:
  656. full_name = os.path.join(root, file)
  657. fsize, unit, mtime = get_file_stats(full_name)
  658. outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(
  659. len(path) * "---", fsize, unit, mtime, file))
  660. return outlist
  661. def get_recursive_list(path) -> list:
  662. outlist: list = []
  663. for root, dirs, files in os.walk(path, topdown=True):
  664. for fname in files:
  665. outlist.append((fname, os.path.join(root, fname)))
  666. return outlist
def handle_manifest(args):
    """Build one web-installer manifest per release plus a main index manifest."""
    set_workdir(args)
    print(f'Creating the web installer manifest')
    outdir: str = os.path.relpath(args.outdir)
    if not os.path.exists(outdir):
        print(f'Creating target folder {outdir}')
        os.makedirs(outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(
        args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        manifest_name = release.get_manifest_name()
        # Download/extract the artifacts, then fill in the manifest template.
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = manifest_name
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        print_debug(f'Generated manifest: \n{json.dumps(man)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        print(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        # Remove the temporary extraction folder for this release.
        release.cleanup()
    # The main manifest indexes every per-release manifest written above.
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    print(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)
  695. def get_new_file_names(manif_json) -> collections.OrderedDict():
  696. new_release_files: dict = collections.OrderedDict()
  697. for artifact in manif_json:
  698. for name in [f["name"] for f in artifact["bin_files"]]:
  699. new_release_files[name] = artifact
  700. new_release_files[artifact["manifest_name"]] = artifact["name"]
  701. return new_release_files
  702. def copy_no_overwrite(source: str, target: str):
  703. sfiles = os.listdir(source)
  704. for f in sfiles:
  705. source_file = os.path.join(source, f)
  706. target_file = os.path.join(target, f)
  707. if not os.path.exists(target_file):
  708. print(f'Copying {f} to target')
  709. shutil.copy(source_file, target_file)
  710. else:
  711. print_debug(f'Skipping existing file {f}')
  712. def get_changed_items(repo: Repository) -> Dict:
  713. changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
  714. original_status_dict: Dict[str, int] = repo.status()
  715. # transfer any non-filemode changes to a new dictionary
  716. status_dict: Dict[str, int] = {}
  717. for filename, code in original_status_dict.items():
  718. if code != changed_filemode_status_code:
  719. status_dict[filename] = code
  720. return status_dict
  721. def is_dirty(repo: Repository) -> bool:
  722. return len(get_changed_items(repo)) > 0
def push_if_change(repo: Repository, token: str, source_path: str, manif_json):
    """Commit all working-tree changes and push them to origin.

    If the repository is clean, only prints a warning. The commit reuses the
    author/committer identity of the last commit in the repository at
    *source_path*; the push authenticates against GitHub with *token*.
    """
    if is_dirty(repo):
        print(f'Changes found. Preparing commit')
        # NOTE(review): 'env' is never used after this line — dead code?
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        message = f'Web installer for {format_artifact_from_manifest(manif_json)}'
        tree = index.write_tree()
        # Load the source repo so Releases can report its last author/committer
        Releases.load_repository(source_path)
        commit = repo.create_commit(reference, Releases.get_last_author(
        ), Releases.get_last_committer(), message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        print(
            f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        # NOTE(review): 'credentials' is built but never used — the push below
        # authenticates with a fresh UserPass instead. Candidate for removal.
        credentials = UserPass(token, 'x-oauth-basic')  # passing credentials
        remote: Remote = repo.remotes['origin']
        # remote.credentials = credentials
        auth_method = 'x-access-token'
        remote.push([reference], callbacks=RemoteCallbacks(
            pygit2.UserPass(auth_method, token)))
        print(
            f'::notice Web installer updated for {format_artifact_from_manifest(manif_json)}')
    else:
        print(f'WARNING: No change found. Skipping update')
  749. def update_files(target_artifacts: str, manif_json, source: str):
  750. new_list: dict = get_new_file_names(manif_json)
  751. if os.path.exists(target_artifacts):
  752. print(f'Removing obsolete files from {target_artifacts}')
  753. for entry in get_recursive_list(target_artifacts):
  754. f = entry[0]
  755. full_target = entry[1]
  756. if f not in new_list.keys():
  757. print(f'WARNING: Removing obsolete file {f}')
  758. os.remove(full_target)
  759. else:
  760. print(f'Creating target folder {target_artifacts}')
  761. os.makedirs(target_artifacts, exist_ok=True)
  762. print(f'Copying installer files to {target_artifacts}:')
  763. copy_no_overwrite(os.path.abspath(source), target_artifacts)
def handle_pushinstaller(args):
    """Clone the installer repository, refresh its artifacts and push.

    Wipes any previous clone at args.target, clones args.url, replaces the
    artifact files from args.source according to the manifest, then commits
    and pushes if anything changed.
    """
    set_workdir(args)
    print('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        # Start from a clean clone to avoid stale state
        print(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    print(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    manif_json = parse_json(os.path.join(args.source, args.manif_name))
    update_files(target_artifacts, manif_json, args.source)
    push_if_change(repo, args.token, args.cwd, manif_json)
    repo.state_cleanup()
  778. def handle_show(args):
  779. print('Show')
  780. def extract_files_from_archive(url):
  781. tempfolder = tempfile.mkdtemp()
  782. platform:Response = requests.get(url)
  783. print_debug(f'Downloading {url} to {tempfolder}')
  784. print_debug(f'Transfer status code: {platform.status_code}. Expanding content')
  785. z = zipfile.ZipFile(io.BytesIO(platform.content))
  786. z.extractall(tempfolder)
  787. return tempfolder
  788. def handle_list_files(args):
  789. print(f'Content of {args.cwd}:')
  790. print(NEWLINE_CHAR.join(get_file_list(args.cwd)))
# Bind each subcommand parser to its handler function; argparse stores these
# on the parsed namespace as 'func' (dispatched by main) and 'cmd'.
parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')
  797. def main():
  798. exit_result_code = 0
  799. args = parser.parse_args()
  800. print(f'::group::{args.command}')
  801. print(f'build_tools version : {tool_version}')
  802. print(f'Processing command {args.command}')
  803. func: Callable = getattr(args, 'func', None)
  804. if func is not None:
  805. # Call whatever subcommand function was selected
  806. e: Exception
  807. try:
  808. func(args)
  809. except Exception as e:
  810. print_error(f'Critical error while running {args.command}\n{" ".join(traceback.format_exception(etype=type(e), value=e, tb=e.__traceback__))}')
  811. exit_result_code = 1
  812. else:
  813. # No subcommand was provided, so call help
  814. parser.print_usage()
  815. print(f'::endgroup::')
  816. sys.exit(exit_result_code)
# Standard script entry-point guard: run main() only when executed directly.
if __name__ == '__main__':
    main()