#!/usr/bin/env python
from json import JSONDecodeError
import math
import pathlib
import time
import traceback
from typing import Callable, Dict
import pkg_resources
import sys
import os
import io
import re  # used by Logger below; imported eagerly so Logger works even if the guarded imports fail
from os import walk
from requests import Response

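# Logger wraps print() with GitHub Actions workflow commands (::debug::, ::error::,
# ::notice::, ::warning::) so messages show up with the proper level in the Actions log.
# Newlines are escaped as %0A, which is how Actions expects multi-line annotations.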
class Logger:
    NEWLINE_CHAR = '\n'

    @classmethod
    def print_message(cls, message, prefix=''):
        trimmed = re.sub(r'\n', r'%0A', message, flags=re.MULTILINE)
        print(f'{prefix}{trimmed}')

    @classmethod
    def debug(cls, message):
        cls.print_message(message, '::debug::')

    @classmethod
    def error(cls, message):
        cls.print_message(message, '::error::')

    @classmethod
    def notice(cls, message):
        cls.print_message(message, '::notice::')

    @classmethod
    def warning(cls, message):
        cls.print_message(message, '::warning::')

try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    Logger.error(
        f'Failed importing module {ex.name}, using interpreter {sys.executable}. {Logger.NEWLINE_CHAR} Installed packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(
        ["%s==%s" % (i.key, i.version) for i in installed_packages])
    print(Logger.NEWLINE_CHAR.join(installed_packages_list))
    print('Environment: ')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise

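# Template for the ESP Web Tools manifest generated for each release, plus the mapping
# of build artifacts (source path in the build tree -> target name pattern). $OUTDIR and
# $PREFIX are placeholders substituted when the artifacts are copied.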
tool_version = "1.0.7"
WEB_INSTALLER_DEFAULT_PATH = './web_installer/'
FORMAT = '%(asctime)s %(message)s'
github_env = type('', (), {})()
manifest = {
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "new_install_prompt_erase": True,
    "new_install_improv_wait_time": 20,
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": [
            ]
        }
    ]
}
artifacts_formats_outdir = '$OUTDIR'
artifacts_formats_prefix = '$PREFIX'
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin',
     '$OUTDIR/$PREFIX-partition-table.bin'],
]

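# Dictionary that also exposes its keys as attributes, with case-insensitive fallback
# lookups. Used to wrap GitHub API payloads and os.environ.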
class AttributeDict(dict):
    __slots__ = ()

    def __getattr__(self, name: str):
        try:
            return self[name.upper()]
        except Exception:
            try:
                return self[name.lower()]
            except Exception:
                for attr in self.keys():
                    if name.lower() == attr.replace("'", "").lower():
                        return self[attr]

    __setattr__ = dict.__setitem__

parser = argparse.ArgumentParser(
    description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str,
                    help='Working directory', default=os.getcwd())
parser.add_argument('--loglevel', type=str, choices={
    'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'}, help='Logging level', default='INFO')
subparsers = parser.add_subparsers(dest='command', required=True)
parser_dir = subparsers.add_parser("list_files",
                                   add_help=False,
                                   description="List Files parser",
                                   help="Display the content of the folder")
parser_manifest = subparsers.add_parser("manifest",
                                        add_help=False,
                                        description="Manifest parser",
                                        help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str,
                             help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument(
    '--max_count', type=int, help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_manifest.add_argument(
    '--outdir', required=True, type=str, help='Output directory for files and manifests')
parser_pushinstaller = subparsers.add_parser("pushinstaller",
                                             add_help=False,
                                             description="Web Installer Checkout parser",
                                             help="Handles the creation of artifacts files")
parser_pushinstaller.add_argument(
    '--target', type=str, help='Output directory for web installer repository', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--artifacts', type=str, help='Target subdirectory for web installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--source', type=str, help='Source directory for the installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument('--url', type=str, help='Web Installer clone url',
                                  default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument(
    '--web_installer_branch', type=str, help='Web Installer branch to use', default='main')
parser_pushinstaller.add_argument(
    '--token', type=str, help='Auth token for pushing changes')
parser_pushinstaller.add_argument(
    '--flash_file', type=str, help='Manifest json file path')
parser_pushinstaller.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_environment = subparsers.add_parser("environment",
                                           add_help=False,
                                           description="Environment parser",
                                           help="Updates the build environment")
parser_environment.add_argument(
    '--env_file', type=str, help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument(
    '--build', required=True, type=int, help='The build number')
parser_environment.add_argument(
    '--node', required=True, type=str, help='The matrix node being built')
parser_environment.add_argument(
    '--depth', required=True, type=int, help='The bit depth being built')
parser_environment.add_argument(
    '--major', type=str, help='Major version', default='2')
parser_environment.add_argument(
    '--docker', type=str, help='Docker image to use', default='sle118/squeezelite-esp32-idfv43')
parser_show = subparsers.add_parser("show",
                                    add_help=False,
                                    description="Show parser",
                                    help="Show the build environment")
parser_build_flags = subparsers.add_parser("build_flags",
                                           add_help=False,
                                           description="Build Flags",
                                           help="Updates the build environment with build flags")
parser_build_flags.add_argument(
    '--mock', action='store_true', help='Mock release')
parser_build_flags.add_argument(
    '--force', action='store_true', help='Force a release build')
parser_build_flags.add_argument(
    '--ui_build', action='store_true', help='Include building the web UI')

def format_commit(commit):
    # 463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(
        timedelta(minutes=commit.author.offset)))
    timestr = dt.strftime('%c%z')
    cmesg = commit.message.replace('\n', ' ')
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace('  ', ' ')

def get_github_data(repo: Repository, api):
    base_url = urlparse(repo.remotes['origin'].url)
    print(
        f'Base URL is {base_url.path} from remote URL {repo.remotes["origin"].url}')
    url_parts = base_url.path.split('.')
    for p in url_parts:
        print(f'URL Part: {p}')
    api_url = f"{url_parts[0]}/{api}"
    print(f'API to call: {api_url}')
    url = f"https://api.github.com/repos{api_url}"
    resp = requests.get(
        url, headers={"Content-Type": "application/vnd.github.v3+json"})
    return json.loads(resp.text)

def dump_directory(dir_path):
    # list to store file names
    res = []
    for (dir_path, dir_names, file_names) in walk(dir_path):
        res.extend(file_names)
    print(res)

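# Release tags are expected to look like "<version>[-<bitrate>]#<idf>#<platform>#<branch>";
# ReleaseDetails splits such a tag into its components.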
class ReleaseDetails():
    version: str
    idf: str
    platform: str
    branch: str
    bitrate: str

    def __init__(self, tag: str) -> None:
        self.version, self.idf, self.platform, self.branch = tag.split('#')
        self.bitrate = None  # stays None when the version carries no bitrate suffix
        try:
            self.version, self.bitrate = self.version.split('-')
        except Exception:
            pass

    def get_attributes(self):
        return {
            'version': self.version,
            'idf': self.idf,
            'platform': self.platform,
            'branch': self.branch,
            'bitrate': self.bitrate
        }

    def format_prefix(self) -> str:
        return f'{self.branch}-{self.platform}-{self.version}'

    def get_full_platform(self):
        return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"

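# BinFile represents a single firmware binary (bootloader, partition table, app, ...)
# together with its flash offset, and knows how to copy itself into the web installer
# output folder.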
class BinFile():
    name: str
    offset: int
    source_full_path: str
    target_name: str
    target_fullpath: str
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(
            file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(
            source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{release_details.bitrate}-{self.name}'.rstrip()

    def get_manifest(self):
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        Logger.debug(
            f'File {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path,
                            self.target_fullpath, follow_symlinks=True)
        except Exception as ex:
            Logger.error(f"Error while copying {self.source_full_path} to {self.target_fullpath}{Logger.NEWLINE_CHAR}Content of {os.path.dirname(self.source_full_path.rstrip())}:{Logger.NEWLINE_CHAR}{Logger.NEWLINE_CHAR.join(get_file_list(os.path.dirname(self.source_full_path.rstrip())))}")
            raise
        return self.target_fullpath

    def get_attributes(self):
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }

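# PlatformRelease wraps one GitHub release: its assets, the parsed tag details and the
# list of BinFile objects extracted from the release's zip artifact.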
class PlatformRelease():
    name: str
    description: str
    url: str = ''
    zipfile: str = ''
    tempfolder: str
    release_details: ReleaseDetails
    flash_parms = {}
    build_dir: str
    has_artifacts: bool
    branch: str
    assets: list
    bin_files: list
    name_prefix: str
    flash_file_path: str

    def get_manifest_name(self) -> str:
        return f'{self.name_prefix}-{self.release_details.format_prefix()}-{self.release_details.bitrate}.json'

    def __init__(self, flash_file_path, git_release, build_dir, branch, name_prefix) -> None:
        self.name = git_release.tag_name
        self.description = git_release.body
        self.assets = git_release['assets']
        self.has_artifacts = False
        self.name_prefix = name_prefix
        if len(self.assets) > 0:
            if self.has_asset_type():
                self.url = self.get_asset_from_extension().browser_download_url
            if self.has_asset_type('.zip'):
                self.zipfile = self.get_asset_from_extension(
                    ext='.zip').browser_download_url
                self.has_artifacts = True
        self.release_details = ReleaseDetails(git_release.name)
        self.bin_files = list()
        self.flash_file_path = flash_file_path
        self.build_dir = os.path.relpath(build_dir)
        self.branch = branch

    def process_files(self, outdir: str) -> list:
        parts = []
        for f in self.bin_files:
            f.copy(outdir)
            parts.append(f.get_manifest())
        return parts

    def get_asset_from_extension(self, ext='.bin'):
        for a in self.assets:
            filename = AttributeDict(a).name
            file_name, file_extension = os.path.splitext(filename)
            if file_extension == ext:
                return AttributeDict(a)
        return None

    def has_asset_type(self, ext='.bin') -> bool:
        return self.get_asset_from_extension(ext) is not None

    def platform(self):
        return self.release_details.get_full_platform()

    def get_zip_file(self):
        self.tempfolder = extract_files_from_archive(self.zipfile)
        print(
            f'Artifacts for {self.name} extracted to {self.tempfolder}')
        flash_parms_file = os.path.relpath(
            self.tempfolder + self.flash_file_path)
        line: str
        with open(flash_parms_file) as fin:
            for line in fin:
                components = line.split()
                if len(components) == 2:
                    self.flash_parms[os.path.basename(
                        components[1]).rstrip().lstrip()] = components[0]
        try:
            for artifact in artifacts_formats:
                base_name = os.path.basename(artifact[0]).rstrip().lstrip()
                self.bin_files.append(BinFile(
                    self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
            self.has_artifacts = True
        except Exception:
            self.has_artifacts = False

    def cleanup(self):
        Logger.debug(f'removing temp directory for platform release {self.name}')
        shutil.rmtree(self.tempfolder)

    def get_attributes(self):
        return {
            'name': self.name,
            'branch': self.branch,
            'description': self.description,
            'url': self.url,
            'zipfile': self.zipfile,
            'release_details': self.release_details.get_attributes(),
            'bin_files': [b.get_attributes() for b in self.bin_files],
            'manifest_name': self.get_manifest_name()
        }

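# Releases collects PlatformRelease objects per platform (up to maxcount each) for the
# current branch, and provides the pygit2 helpers used to query the local repository.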
class Releases():
    _dict: dict = collections.OrderedDict()
    maxcount: int = 0
    branch: str = ''
    repo: Repository = None
    last_commit: Commit = None
    manifest_name: str

    def __init__(self, branch: str, maxcount: int = 3) -> None:
        self.maxcount = maxcount
        self.branch = branch

    def count(self, value: PlatformRelease) -> int:
        content = self._dict.get(value.platform())
        if content is None:
            return 0
        return len(content)

    def get_platform(self, platform: str) -> list:
        return self._dict[platform]

    def get_platform_keys(self):
        return self._dict.keys()

    def get_all(self) -> list:
        result: list = []
        for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
            for release in platform:
                result.append(release)
        return result

    def append(self, value: PlatformRelease):
        if self.count(value) == 0:
            self._dict[value.platform()] = []
        if self.should_add(value):
            print(f'Adding release {value.name} to the list')
            self._dict[value.platform()].append(value)
        else:
            print(f'Skipping release {value.name}')

    def get_attributes(self):
        res = []
        release: PlatformRelease
        for release in self.get_all():
            res.append(release.get_attributes())
        return res

    def get_minlen(self) -> int:
        return min([len(self.get_platform(p)) for p in self.get_platform_keys()])

    def got_all_packages(self) -> bool:
        return self.get_minlen() >= self.maxcount

    def should_add(self, release: PlatformRelease) -> bool:
        return self.count(release) <= self.maxcount

    def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
        if self.branch != package.branch:
            Logger.debug(f'Skipping release {package.name} from branch {package.branch}')
        elif package.has_artifacts or not with_artifacts:
            self.append(package)

    @classmethod
    def get_last_commit_message(cls, repo_obj: Repository = None) -> str:
        last: Commit = cls.get_last_commit(repo_obj)
        if last is None:
            return ''
        else:
            return last.message.replace(Logger.NEWLINE_CHAR, ' ')

    @classmethod
    def get_last_author(cls, repo_obj: Repository = None) -> Signature:
        last: Commit = cls.get_last_commit(repo_obj)
        return last.author

    @classmethod
    def get_last_committer(cls, repo_obj: Repository = None) -> Signature:
        last: Commit = cls.get_last_commit(repo_obj)
        return last.committer

    @classmethod
    def get_last_commit(cls, repo_obj: Repository = None) -> Commit:
        loc_repo = repo_obj
        if cls.repo is None:
            cls.load_repository(os.getcwd())
        if loc_repo is None:
            loc_repo = cls.repo
        head: Reference = loc_repo.head
        target = head.target
        ref: Reference
        if cls.last_commit is None:
            try:
                cls.last_commit = loc_repo[target]
                print(
                    f'Last commit for {head.shorthand} is {format_commit(cls.last_commit)}')
            except Exception as e:
                Logger.error(
                    f'Unable to retrieve last commit for {head.shorthand}/{target}: {e}')
                cls.last_commit = None
        return cls.last_commit

    @classmethod
    def load_repository(cls, path: str = os.getcwd()) -> Repository:
        if cls.repo is None:
            try:
                print(f'Opening repository from {path}')
                cls.repo = Repository(path=path)
            except GitError as ex:
                Logger.error(
                    f"Unable to access the repository({ex}).\nContent of {path}:\n{Logger.NEWLINE_CHAR.join(get_file_list(path, 1))}")
                raise
        return cls.repo

    @classmethod
    def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
        commit: Commit
        reference: Reference
        commit, reference = repo.resolve_refish(commit_id)
        return commit

    @classmethod
    def get_branch_name(cls) -> str:
        return re.sub(r'[^a-zA-Z0-9\-~!@_\.]', '', cls.load_repository().head.shorthand)

    @classmethod
    def get_release_branch(cls, repo: Repository, platform_release) -> str:
        match = [t for t in repo.branches.with_commit(
            platform_release.target_commitish)]
        no_origin = [t for t in match if 'origin' not in t]
        if len(no_origin) == 0 and len(match) > 0:
            return match[0].split('/')[1]
        elif len(no_origin) > 0:
            return no_origin[0]
        return ''

    @classmethod
    def get_flash_parms(cls, file_path):
        flash = parse_json(file_path)
        od: collections.OrderedDict = collections.OrderedDict()
        for z in flash['flash_files'].items():
            base_name: str = os.path.basename(z[1])
            od[base_name.rstrip().lstrip()] = literal_eval(z[0])
        return collections.OrderedDict(sorted(od.items()))

    @classmethod
    def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
        repo = Releases.load_repository(os.getcwd())
        packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
        build_dir = os.path.dirname(flash_file_path)
        for page in range(1, 999):
            Logger.debug(f'Getting releases page {page}')
            releases = get_github_data(
                repo, f'releases?per_page=50&page={page}')
            if len(releases) == 0:
                Logger.debug(f'No more release found for page {page}')
                break
            for release_entry in [AttributeDict(platform) for platform in releases]:
                packages.add_package(PlatformRelease(flash_file_path, release_entry, build_dir,
                                                     Releases.get_release_branch(repo, release_entry), name_prefix))
                if packages.got_all_packages():
                    break
            if packages.got_all_packages():
                break
        return packages
    @classmethod
    def get_commit_list(cls) -> list:
        commit_list = []
        last: Commit = Releases.get_last_commit()
        if last is None:
            return commit_list
        try:
            for c in Releases.load_repository().walk(last.id, pygit2.GIT_SORT_TIME):
                if '[skip actions]' not in c.message:
                    commit_list.append(format_commit(c))
                if len(commit_list) > 10:
                    break
        except Exception as e:
            Logger.error(
                f'Unable to get commit list starting at {last.id}: {e}')
        return commit_list

    @classmethod
    def get_commit_list_descriptions(cls) -> str:
        return '<<~EOD\n### Revision Log\n' + Logger.NEWLINE_CHAR.join(cls.get_commit_list()) + '\n~EOD'
    # dict-style helpers kept from the original source
    def update(self, *args, **kwargs):
        if args:
            if len(args) > 1:
                raise TypeError("update expected at most 1 arguments, "
                                "got %d" % len(args))
            other = dict(args[0])
            for key in other:
                self[key] = other[key]
        for key in kwargs:
            self[key] = kwargs[key]

    def setdefault(self, key, value=None):
        if key not in self:
            self[key] = value
        return self[key]

def set_workdir(args):
    print(f'setting work dir to: {args.cwd}')
    os.chdir(os.path.abspath(args.cwd))

def parse_json(filename: str):
    fname = os.path.abspath(filename)
    folder: str = os.path.abspath(os.path.dirname(filename))
    print(f'Opening json file {fname} from {folder}')
    try:
        with open(fname) as f:
            content = f.read()
            Logger.debug(f'Loading json\n{content}')
            return json.loads(content)
    except JSONDecodeError as ex:
        Logger.error(f'Error parsing {content}')
    except Exception as ex:
        Logger.error(
            f"Unable to parse flasher args json file. Content of {folder}:{Logger.NEWLINE_CHAR.join(get_file_list(folder))}")
        raise

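# Writes every public attribute of the values object as KEY=VALUE lines to the given
# environment file (GITHUB_ENV / GITHUB_OUTPUT style) and mirrors them into os.environ.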
def write_github_env_file(values, env_file):
    print(f'Writing content to {env_file}...')
    with open(env_file, "w") as env_fh:
        for attr in [attr for attr in dir(values) if not attr.startswith('_')]:
            line = f'{attr}{"=" if attr != "description" else ""}{getattr(values, attr)}'
            print(line)
            env_fh.write(f'{line}\n')
            os.environ[attr] = str(getattr(values, attr))
    print(f'Done writing to {env_file}!')

def format_artifact_from_manifest(manif_json: AttributeDict):
    if len(manif_json) == 0:
        return 'Newest release'
    first = manif_json[0]
    return f'{first["branch"]}-{first["release_details"]["version"]}'


def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
    return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'

def handle_build_flags(args):
    set_workdir(args)
    print('Setting global build flags')
    commit_message: str = Releases.get_last_commit_message()
    github_env.mock = 1 if args.mock else 0
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag == 1 else 0
    write_github_env_file(github_env, os.environ.get('GITHUB_OUTPUT'))

def write_version_number(file_path: str, env_details):
    # app_name="${TARGET_BUILD_NAME}.${DEPTH}.dev-$(git log --pretty=format:'%h' --max-count=1).${branch_name}"
    # echo "${app_name}">version.txt
    try:
        version: str = f'{env_details.TARGET_BUILD_NAME}.{env_details.DEPTH}.{env_details.major}.{env_details.BUILD_NUMBER}.{env_details.branch_name}'
        with open(file_path, "w") as version_file:
            version_file.write(version)
    except Exception as ex:
        Logger.error(f'Unable to set version string in file {file_path}: {ex}')
        raise Exception('Version error')
    Logger.notice(f'Firmware version set to {version}')

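# "environment" subcommand: derives build metadata (version tag, artifact names, commit
# description, ...) from the git repository and the matrix arguments, then writes it to
# the GitHub environment file and to version.txt.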
def handle_environment(args):
    set_workdir(args)
    print('Setting environment variables...')
    commit_message: str = Releases.get_last_commit_message()
    last: Commit = Releases.get_last_commit()
    if last is not None:
        github_env.author_name = last.author.name
        github_env.author_email = last.author.email
        github_env.committer_name = last.committer.name
        github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = Releases.get_branch_name()
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name(
        'squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{args.build}'
    github_env.description = Releases.get_commit_list_descriptions()
    write_github_env_file(github_env, args.env_file)
    write_version_number("version.txt", github_env)

def handle_artifacts(args):
    set_workdir(args)
    print('Handling artifacts')
    for attr in artifacts_formats:
        target: str = os.path.relpath(attr[1].replace(artifacts_formats_outdir, args.outdir).replace(
            artifacts_formats_prefix, format_artifact_name()))
        source: str = os.path.relpath(attr[0])
        target_dir: str = os.path.dirname(target)
        print(f'Copying file {source} to {target}')
        try:
            os.makedirs(target_dir, exist_ok=True)
            shutil.copyfile(source, target, follow_symlinks=True)
        except Exception as ex:
            Logger.error(f"Error while copying {source} to {target}\nContent of {target_dir}:\n{Logger.NEWLINE_CHAR.join(get_file_list(os.path.dirname(attr[0].rstrip())))}")
            raise

def delete_folder(path):
    '''Remove read-only files, then delete the folder tree'''
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            fulldirpath = os.path.join(root, dir)
            Logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            Logger.debug(f'Setting file read/write {full_path}')
            os.chmod(full_path, stat.S_IWRITE)
            Logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        Logger.debug(f'Changing folder read/write {path}')
        os.chmod(path, stat.S_IWRITE)
        print(f'WARNING: Deleting Folder {path}')
        os.rmdir(path)

def get_file_stats(path):
    fstat: os.stat_result = pathlib.Path(path).stat()
    # Convert file size to MB, KB or Bytes
    mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
    if fstat.st_size > 1024 * 1024:
        return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
    elif fstat.st_size > 1024:
        return math.ceil(fstat.st_size / 1024), "KB", mtime
    return fstat.st_size, "B", mtime

def get_file_list(root_path, max_levels: int = 2) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(root_path):
        path = os.path.relpath(root).split(os.sep)
        if len(path) <= max_levels:
            outlist.append(f'\n{root}')
            for file in files:
                full_name = os.path.join(root, file)
                fsize, unit, mtime = get_file_stats(full_name)
                outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(
                    len(path) * "---", fsize, unit, mtime, file))
    return outlist

def get_recursive_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for fname in files:
            outlist.append((fname, os.path.join(root, fname)))
    return outlist

def handle_manifest(args):
    set_workdir(args)
    print('Creating the web installer manifest')
    outdir: str = os.path.relpath(args.outdir)
    if not os.path.exists(outdir):
        print(f'Creating target folder {outdir}')
        os.makedirs(outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(
        args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        manifest_name = release.get_manifest_name()
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = manifest_name
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        Logger.debug(f'Generated manifest: \n{json.dumps(man)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        print(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        release.cleanup()
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    print(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)

def get_new_file_names(manif_json) -> collections.OrderedDict:
    new_release_files: dict = collections.OrderedDict()
    for artifact in manif_json:
        for name in [f["name"] for f in artifact["bin_files"]]:
            new_release_files[name] = artifact
        new_release_files[artifact["manifest_name"]] = artifact["name"]
    return new_release_files

def copy_no_overwrite(source: str, target: str):
    sfiles = os.listdir(source)
    for f in sfiles:
        source_file = os.path.join(source, f)
        target_file = os.path.join(target, f)
        if not os.path.exists(target_file):
            print(f'Copying {f} to target')
            shutil.copy(source_file, target_file)
        else:
            Logger.debug(f'Skipping existing file {f}')

def get_changed_items(repo: Repository) -> Dict:
    changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
    original_status_dict: Dict[str, int] = repo.status()
    # transfer any non-filemode changes to a new dictionary
    status_dict: Dict[str, int] = {}
    for filename, code in original_status_dict.items():
        if code != changed_filemode_status_code:
            status_dict[filename] = code
    return status_dict


def is_dirty(repo: Repository) -> bool:
    return len(get_changed_items(repo)) > 0

def push_with_method(auth_method: str, token: str, remote: Remote, reference):
    success: bool = False
    try:
        remote.push(reference, callbacks=RemoteCallbacks(pygit2.UserPass(auth_method, token)))
        success = True
    except Exception as ex:
        Logger.error(f'Error pushing with auth method {auth_method}: {ex}.')
    return success

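# Commits and pushes the web installer repository when its working tree has changes,
# reusing the author/committer of the last commit of the source repository and
# authenticating with an x-access-token UserPass callback.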
def push_if_change(repo: Repository, token: str, source_path: str, manif_json):
    if is_dirty(repo):
        print('Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        message = f'Web installer for {format_artifact_from_manifest(manif_json)}'
        tree = index.write_tree()
        Releases.load_repository(source_path)
        commit = repo.create_commit(reference, Releases.get_last_author(),
                                    Releases.get_last_committer(), message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        print(
            f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        remote: Remote = repo.remotes['origin']
        # remote.credentials = credentials
        auth_method = 'x-access-token'
        remote.push([reference], callbacks=RemoteCallbacks(
            pygit2.UserPass(auth_method, token)))
        print(
            f'::notice Web installer updated for {format_artifact_from_manifest(manif_json)}')
    else:
        print('WARNING: No change found. Skipping update')

def update_files(target_artifacts: str, manif_json, source: str):
    new_list: dict = get_new_file_names(manif_json)
    if os.path.exists(target_artifacts):
        print(f'Removing obsolete files from {target_artifacts}')
        for entry in get_recursive_list(target_artifacts):
            f = entry[0]
            full_target = entry[1]
            if f not in new_list.keys():
                print(f'WARNING: Removing obsolete file {f}')
                os.remove(full_target)
    else:
        print(f'Creating target folder {target_artifacts}')
        os.makedirs(target_artifacts, exist_ok=True)
    print(f'Copying installer files to {target_artifacts}:')
    copy_no_overwrite(os.path.abspath(source), target_artifacts)

def handle_pushinstaller(args):
    set_workdir(args)
    print('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        print(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    print(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    manif_json = parse_json(os.path.join(args.source, args.manif_name))
    update_files(target_artifacts, manif_json, args.source)
    push_if_change(repo, args.token, args.cwd, manif_json)
    repo.state_cleanup()

def handle_show(args):
    print('Show')

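# Downloads a release zip archive into a temporary folder and extracts it; the caller
# is responsible for removing the folder afterwards (see PlatformRelease.cleanup).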
def extract_files_from_archive(url):
    tempfolder = tempfile.mkdtemp()
    platform: Response = requests.get(url)
    Logger.debug(f'Downloading {url} to {tempfolder}')
    Logger.debug(f'Transfer status code: {platform.status_code}. Expanding content')
    z = zipfile.ZipFile(io.BytesIO(platform.content))
    z.extractall(tempfolder)
    return tempfolder

def handle_list_files(args):
    print(f'Content of {args.cwd}:')
    print(Logger.NEWLINE_CHAR.join(get_file_list(args.cwd)))


parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')

def main():
    exit_result_code = 0
    args = parser.parse_args()
    print(f'::group::{args.command}')
    print(f'build_tools version : {tool_version}')
    print(f'Processing command {args.command}')
    func: Callable = getattr(args, 'func', None)
    if func is not None:
        # Call whatever subcommand function was selected
        try:
            func(args)
        except Exception as e:
            Logger.error(f'Critical error while running {args.command}\n{" ".join(traceback.format_exception(type(e), e, e.__traceback__))}')
            exit_result_code = 1
    else:
        # No subcommand was provided, so call help
        parser.print_usage()
    print('::endgroup::')
    sys.exit(exit_result_code)


if __name__ == '__main__':
    main()