build_tools.py

#!/usr/bin/env python
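"""Helper for parts of the squeezelite-esp32 build process.

Intended to run inside GitHub Actions (it emits ::debug::/::error:: workflow
commands and writes to GITHUB_ENV/GITHUB_OUTPUT). Sub-commands:

  environment    - compute and export the build environment variables
  build_flags    - derive release/mock/ui_build flags from the last commit message
  manifest       - build the web installer manifest files from GitHub releases
  pushinstaller  - push artifacts and manifests to the web installer repository
  show           - show the build environment
  list_files     - display the content of the working directory
  list_commits   - print the recent commit descriptions

Illustrative invocation (argument values are examples only):
  python build_tools.py --cwd . environment --build 123 --node I2S-4MFlash --depth 16
"""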
from json import JSONDecodeError
import math
import pathlib
import time
import traceback
from typing import Callable, Dict
import pkg_resources
import sys
import os
import io
from os import walk
from requests import Response

class Logger:
    NEWLINE_CHAR = '\n'
    with_crlf = False

    @classmethod
    def print_message(cls, message, prefix=''):
        trimmed = message
        if not Logger.with_crlf:
            # GitHub workflow commands require single-line messages, so encode newlines
            trimmed = re.sub(r'\n', r'%0A', message, flags=re.MULTILINE)
        print(f'{prefix}{trimmed}')

    @classmethod
    def debug(cls, message):
        cls.print_message(message, '::debug::')

    @classmethod
    def error(cls, message):
        cls.print_message(message, '::error::')

    @classmethod
    def notice(cls, message):
        cls.print_message(message, '::notice::')

    @classmethod
    def warning(cls, message):
        cls.print_message(message, '::warning::')

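# The remaining imports are wrapped so that a failed import dumps the installed
# packages and the environment, which makes CI failures easier to diagnose.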
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    Logger.error(
        f'Failed importing module {ex.name}, using interpreter {sys.executable}. {Logger.NEWLINE_CHAR} Installed packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(
        ["%s==%s" % (i.key, i.version) for i in installed_packages])
    print(Logger.NEWLINE_CHAR.join(installed_packages_list))
    print('Environment:')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise

tool_version = "1.0.7"
WEB_INSTALLER_DEFAULT_PATH = './web_installer/'
FORMAT = '%(asctime)s %(message)s'
github_env = type('', (), {})()
manifest = {
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "new_install_prompt_erase": True,
    "new_install_improv_wait_time": 20,
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": []
        }
    ]
}

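# Mapping of build artifacts to their published names; $OUTDIR and $PREFIX are
# placeholders substituted when the files are copied.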
artifacts_formats_outdir = '$OUTDIR'
artifacts_formats_prefix = '$PREFIX'
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin',
     '$OUTDIR/$PREFIX-partition-table.bin'],
]

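# Dictionary wrapper allowing case-insensitive attribute-style access, used for
# os.environ entries and for dictionaries returned by the GitHub REST API.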
class AttributeDict(dict):
    __slots__ = ()

    def __getattr__(self, name: str):
        try:
            return self[name.upper()]
        except Exception:
            try:
                return self[name.lower()]
            except Exception:
                for attr in self.keys():
                    if name.lower() == attr.replace("'", "").lower():
                        return self[attr]
    __setattr__ = dict.__setitem__

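# Command line definition: one sub-parser per build step.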
parser = argparse.ArgumentParser(
    description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str,
                    help='Working directory', default=os.getcwd())
parser.add_argument('--with_crlf', action='store_true',
                    help='To prevent replacing cr/lf with hex representation')
parser.add_argument('--loglevel', type=str, choices={
                    'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'},
                    help='Logging level', default='INFO')
subparsers = parser.add_subparsers(dest='command', required=True)
parser_commits = subparsers.add_parser("list_commits", add_help=False,
                                       description="Commits list",
                                       help="Lists the last commits")
parser_dir = subparsers.add_parser("list_files",
                                   add_help=False,
                                   description="List Files parser",
                                   help="Display the content of the folder")
parser_manifest = subparsers.add_parser("manifest",
                                        add_help=False,
                                        description="Manifest parser",
                                        help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str,
                             help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument(
    '--max_count', type=int, help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_manifest.add_argument(
    '--outdir', required=True, type=str, help='Output directory for files and manifests')
parser_pushinstaller = subparsers.add_parser("pushinstaller",
                                             add_help=False,
                                             description="Web Installer Checkout parser",
                                             help="Handles the creation of artifacts files")
parser_pushinstaller.add_argument(
    '--target', type=str, help='Output directory for web installer repository', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--artifacts', type=str, help='Target subdirectory for web installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument(
    '--source', type=str, help='Source directory for the installer artifacts', default=WEB_INSTALLER_DEFAULT_PATH)
parser_pushinstaller.add_argument('--url', type=str, help='Web Installer clone url',
                                  default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument(
    '--web_installer_branch', type=str, help='Web Installer branch to use', default='main')
parser_pushinstaller.add_argument(
    '--token', type=str, help='Auth token for pushing changes')
parser_pushinstaller.add_argument(
    '--flash_file', type=str, help='Manifest json file path')
parser_pushinstaller.add_argument(
    '--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_environment = subparsers.add_parser("environment",
                                           add_help=False,
                                           description="Environment parser",
                                           help="Updates the build environment")
parser_environment.add_argument(
    '--env_file', type=str, help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument(
    '--build', required=True, type=int, help='The build number')
parser_environment.add_argument(
    '--node', required=True, type=str, help='The matrix node being built')
parser_environment.add_argument(
    '--depth', required=True, type=int, help='The bit depth being built')
parser_environment.add_argument(
    '--major', type=str, help='Major version', default='2')
parser_environment.add_argument(
    '--docker', type=str, help='Docker image to use', default='sle118/squeezelite-esp32-idfv43')
parser_show = subparsers.add_parser("show",
                                    add_help=False,
                                    description="Show parser",
                                    help="Show the build environment")
parser_build_flags = subparsers.add_parser("build_flags",
                                           add_help=False,
                                           description="Build Flags",
                                           help="Updates the build environment with build flags")
parser_build_flags.add_argument(
    '--mock', action='store_true', help='Mock release')
parser_build_flags.add_argument(
    '--force', action='store_true', help='Force a release build')
parser_build_flags.add_argument(
    '--ui_build', action='store_true', help='Include building the web UI')

def format_commit(commit):
    # 463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(
        timedelta(minutes=commit.author.offset)))
    # timestr = dt.strftime('%c%z')
    timestr = dt.strftime('%F %R %Z')
    cmesg: str = commit.message.replace('\n', ' ').replace('\r', '').replace('*', '-')
    # collapse double spaces left over from stripping newlines
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace('  ', ' ')

def get_github_data(repo: Repository, api):
    base_url = urlparse(repo.remotes['origin'].url)
    print(
        f'Base URL is {base_url.path} from remote URL {repo.remotes["origin"].url}')
    url_parts = base_url.path.split('.')
    for p in url_parts:
        print(f'URL Part: {p}')
    api_url = f"{url_parts[0]}/{api}"
    print(f'API to call: {api_url}')
    url = f"https://api.github.com/repos{api_url}"
    resp = requests.get(
        url, headers={"Content-Type": "application/vnd.github.v3+json"})
    return json.loads(resp.text)

def dump_directory(dir_path):
    # list to store files name
    res = []
    for (dir_path, dir_names, file_names) in walk(dir_path):
        res.extend(file_names)
    print(res)

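# Release tags are formatted as 'version#idf#platform#branch', where the version
# component may carry an optional '-bitrate' suffix.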
class ReleaseDetails():
    version: str
    idf: str
    platform: str
    branch: str
    bitrate: str

    def __init__(self, tag: str) -> None:
        self.version, self.idf, self.platform, self.branch = tag.split('#')
        try:
            self.version, self.bitrate = self.version.split('-')
        except Exception:
            # no bitrate suffix in the version component
            self.bitrate = None

    def get_attributes(self):
        return {
            'version': self.version,
            'idf': self.idf,
            'platform': self.platform,
            'branch': self.branch,
            'bitrate': self.bitrate
        }

    def format_prefix(self) -> str:
        return f'{self.branch}-{self.platform}-{self.version}'

    def get_full_platform(self):
        return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"

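# A single firmware binary taken from an extracted release archive, along with
# its flash offset and the name it gets in the web installer output folder.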
class BinFile():
    name: str
    offset: int
    source_full_path: str
    target_name: str
    target_fullpath: str
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(
            file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(
            source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{release_details.bitrate}-{self.name}'.rstrip()

    def get_manifest(self):
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        Logger.debug(
            f'File {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path,
                            self.target_fullpath, follow_symlinks=True)
        except Exception as ex:
            Logger.error(f"Error while copying {self.source_full_path} to {self.target_fullpath}{Logger.NEWLINE_CHAR}Content of {os.path.dirname(self.source_full_path.rstrip())}:{Logger.NEWLINE_CHAR}{Logger.NEWLINE_CHAR.join(get_file_list(os.path.dirname(self.source_full_path.rstrip())))}")
            raise
        return self.target_fullpath

    def get_attributes(self):
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }

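# A GitHub release for one platform/target. The .zip asset holds the build
# artifacts; the flash definition file inside the archive provides the offsets.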
class PlatformRelease():
    name: str
    description: str
    url: str = ''
    zipfile: str = ''
    tempfolder: str
    release_details: ReleaseDetails
    flash_parms = {}
    build_dir: str
    has_artifacts: bool
    branch: str
    assets: list
    bin_files: list
    name_prefix: str
    flash_file_path: str

    def get_manifest_name(self) -> str:
        return f'{self.name_prefix}-{self.release_details.format_prefix()}-{self.release_details.bitrate}.json'

    def __init__(self, flash_file_path, git_release, build_dir, branch, name_prefix) -> None:
        self.name = git_release.tag_name
        self.description = git_release.body
        self.assets = git_release['assets']
        self.has_artifacts = False
        self.name_prefix = name_prefix
        if len(self.assets) > 0:
            if self.has_asset_type():
                self.url = self.get_asset_from_extension().browser_download_url
            if self.has_asset_type('.zip'):
                self.zipfile = self.get_asset_from_extension(
                    ext='.zip').browser_download_url
                self.has_artifacts = True
        self.release_details = ReleaseDetails(git_release.name)
        self.bin_files = list()
        self.flash_file_path = flash_file_path
        self.build_dir = os.path.relpath(build_dir)
        self.branch = branch

    def process_files(self, outdir: str) -> list:
        parts = []
        for f in self.bin_files:
            f.copy(outdir)
            parts.append(f.get_manifest())
        return parts

    def get_asset_from_extension(self, ext='.bin'):
        for a in self.assets:
            filename = AttributeDict(a).name
            file_name, file_extension = os.path.splitext(filename)
            if file_extension == ext:
                return AttributeDict(a)
        return None

    def has_asset_type(self, ext='.bin') -> bool:
        return self.get_asset_from_extension(ext) is not None

    def platform(self):
        return self.release_details.get_full_platform()

    def get_zip_file(self):
        self.tempfolder = extract_files_from_archive(self.zipfile)
        print(
            f'Artifacts for {self.name} extracted to {self.tempfolder}')
        flash_parms_file = os.path.relpath(
            self.tempfolder + self.flash_file_path)
        line: str
        with open(flash_parms_file) as fin:
            for line in fin:
                components = line.split()
                if len(components) == 2:
                    self.flash_parms[os.path.basename(
                        components[1]).rstrip().lstrip()] = components[0]
        try:
            for artifact in artifacts_formats:
                base_name = os.path.basename(artifact[0]).rstrip().lstrip()
                self.bin_files.append(BinFile(
                    self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
            self.has_artifacts = True
        except Exception:
            self.has_artifacts = False

    def cleanup(self):
        Logger.debug(f'removing temp directory for platform release {self.name}')
        shutil.rmtree(self.tempfolder)

    def get_attributes(self):
        return {
            'name': self.name,
            'branch': self.branch,
            'description': self.description,
            'url': self.url,
            'zipfile': self.zipfile,
            'release_details': self.release_details.get_attributes(),
            'bin_files': [b.get_attributes() for b in self.bin_files],
            'manifest_name': self.get_manifest_name()
        }

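# Collection of PlatformRelease objects grouped by platform, capped at maxcount
# entries per platform, plus helpers around the local git repository.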
class Releases():
    _dict: dict = collections.OrderedDict()
    maxcount: int = 0
    branch: str = ''
    repo: Repository = None
    last_commit: Commit = None
    manifest_name: str

    def __init__(self, branch: str, maxcount: int = 3) -> None:
        self.maxcount = maxcount
        self.branch = branch

    def count(self, value: PlatformRelease) -> int:
        content = self._dict.get(value.platform())
        if content is None:
            return 0
        return len(content)

    def get_platform(self, platform: str) -> list:
        return self._dict[platform]

    def get_platform_keys(self):
        return self._dict.keys()

    def get_all(self) -> list:
        result: list = []
        for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
            for release in platform:
                result.append(release)
        return result

    def append(self, value: PlatformRelease):
        if self.count(value) == 0:
            self._dict[value.platform()] = []
        if self.should_add(value):
            print(f'Adding release {value.name} to the list')
            self._dict[value.platform()].append(value)
        else:
            print(f'Skipping release {value.name}')

    def get_attributes(self):
        res = []
        release: PlatformRelease
        for release in self.get_all():
            res.append(release.get_attributes())
        return res

    def get_minlen(self) -> int:
        return min([len(self.get_platform(p)) for p in self.get_platform_keys()])

    def got_all_packages(self) -> bool:
        return self.get_minlen() >= self.maxcount

    def should_add(self, release: PlatformRelease) -> bool:
        return self.count(release) <= self.maxcount

    def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
        if self.branch != package.branch:
            Logger.debug(f'Skipping release {package.name} from branch {package.branch}')
        elif package.has_artifacts or not with_artifacts:
            self.append(package)

    @classmethod
    def get_last_commit_message(cls, repo_obj: Repository = None) -> str:
        last: Commit = cls.get_last_commit(repo_obj)
        if last is None:
            return ''
        else:
            return last.message.replace(Logger.NEWLINE_CHAR, ' ')

    @classmethod
    def get_last_author(cls, repo_obj: Repository = None) -> Signature:
        last: Commit = cls.get_last_commit(repo_obj)
        return last.author

    @classmethod
    def get_last_committer(cls, repo_obj: Repository = None) -> Signature:
        last: Commit = cls.get_last_commit(repo_obj)
        return last.committer

    @classmethod
    def get_last_commit(cls, repo_obj: Repository = None) -> Commit:
        loc_repo = repo_obj
        if cls.repo is None:
            cls.load_repository(os.getcwd())
        if loc_repo is None:
            loc_repo = cls.repo
        head: Reference = loc_repo.head
        target = head.target
        ref: Reference
        if cls.last_commit is None:
            try:
                cls.last_commit = loc_repo[target]
                print(
                    f'Last commit for {head.shorthand} is {format_commit(cls.last_commit)}')
            except Exception as e:
                Logger.error(
                    f'Unable to retrieve last commit for {head.shorthand}/{target}: {e}')
                cls.last_commit = None
        return cls.last_commit

    @classmethod
    def load_repository(cls, path: str = os.getcwd()) -> Repository:
        if cls.repo is None:
            try:
                print(f'Opening repository from {path}')
                cls.repo = Repository(path=path)
            except GitError as ex:
                Logger.error(f"Unable to access the repository ({ex}).\nContent of {path}:\n{Logger.NEWLINE_CHAR.join(get_file_list(path, 1))}")
                raise
        return cls.repo

    @classmethod
    def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
        commit: Commit
        reference: Reference
        commit, reference = repo.resolve_refish(commit_id)
        return commit

    @classmethod
    def get_branch_name(cls) -> str:
        return re.sub(r'[^a-zA-Z0-9\-~!@_\.]', '', cls.load_repository().head.shorthand)

    @classmethod
    def get_release_branch(cls, repo: Repository, platform_release) -> str:
        match = [t for t in repo.branches.with_commit(
            platform_release.target_commitish)]
        no_origin = [t for t in match if 'origin' not in t]
        if len(no_origin) == 0 and len(match) > 0:
            return match[0].split('/')[1]
        elif len(no_origin) > 0:
            return no_origin[0]
        return ''

    @classmethod
    def get_flash_parms(cls, file_path):
        flash = parse_json(file_path)
        od: collections.OrderedDict = collections.OrderedDict()
        for z in flash['flash_files'].items():
            base_name: str = os.path.basename(z[1])
            od[base_name.rstrip().lstrip()] = literal_eval(z[0])
        return collections.OrderedDict(sorted(od.items()))

    @classmethod
    def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
        repo = Releases.load_repository(os.getcwd())
        packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
        build_dir = os.path.dirname(flash_file_path)
        for page in range(1, 999):
            Logger.debug(f'Getting releases page {page}')
            releases = get_github_data(
                repo, f'releases?per_page=50&page={page}')
            if len(releases) == 0:
                Logger.debug(f'No more release found for page {page}')
                break
            for release_entry in [AttributeDict(platform) for platform in releases]:
                packages.add_package(PlatformRelease(flash_file_path, release_entry, build_dir,
                                                     Releases.get_release_branch(repo, release_entry), name_prefix))
                if packages.got_all_packages():
                    break
            if packages.got_all_packages():
                break
        return packages

    @classmethod
    def get_commit_list(cls) -> list:
        commit_list = []
        last: Commit = Releases.get_last_commit()
        if last is None:
            return commit_list
        try:
            for c in Releases.load_repository().walk(last.id, pygit2.GIT_SORT_TIME):
                if '[skip actions]' not in c.message:
                    commit_list.append(format_commit(c))
                    if len(commit_list) > 10:
                        break
        except Exception as e:
            Logger.error(
                f'Unable to get commit list starting at {last.id}: {e}')
        return commit_list

    @classmethod
    def get_commit_list_descriptions(cls) -> str:
        return '<<~EOD\n### Revision Log\n' + Logger.NEWLINE_CHAR.join(cls.get_commit_list()) + '\n~EOD'

    def update(self, *args, **kwargs):
        if args:
            if len(args) > 1:
                raise TypeError("update expected at most 1 arguments, "
                                "got %d" % len(args))
            other = dict(args[0])
            for key in other:
                self[key] = other[key]
        for key in kwargs:
            self[key] = kwargs[key]

    def setdefault(self, key, value=None):
        if key not in self:
            self[key] = value
        return self[key]

def set_workdir(args):
    print(f'setting work dir to: {args.cwd}')
    os.chdir(os.path.abspath(args.cwd))

def parse_json(filename: str):
    fname = os.path.abspath(filename)
    folder: str = os.path.abspath(os.path.dirname(filename))
    print(f'Opening json file {fname} from {folder}')
    try:
        with open(fname) as f:
            content = f.read()
            Logger.debug(f'Loading json\n{content}')
            return json.loads(content)
    except JSONDecodeError as ex:
        Logger.error(f'Error parsing {content}')
    except Exception as ex:
        Logger.error(
            f"Unable to parse flasher args json file. Content of {folder}:{Logger.NEWLINE_CHAR.join(get_file_list(folder))}")
        raise

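# Writes one 'name=value' line per attribute to the GitHub environment/output
# file and mirrors the values into os.environ; 'description' is written without
# '=' because its value already carries the multi-line '<<~EOD' heredoc marker.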
def write_github_env_file(values, env_file):
    print(f'Writing content to {env_file}...')
    with open(env_file, "w") as env:
        for attr in [attr for attr in dir(values) if not attr.startswith('_')]:
            line = f'{attr}{"=" if attr != "description" else ""}{getattr(values, attr)}'
            print(line)
            env.write(f'{line}\n')
            os.environ[attr] = str(getattr(values, attr))
    print(f'Done writing to {env_file}!')


def format_artifact_from_manifest(manif_json: AttributeDict):
    if len(manif_json) == 0:
        return 'Newest release'
    first = manif_json[0]
    return f'{first["branch"]}-{first["release_details"]["version"]}'


def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
    return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'

def handle_build_flags(args):
    set_workdir(args)
    print('Setting global build flags')
    commit_message: str = Releases.get_last_commit_message()
    github_env.mock = 1 if args.mock else 0
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() \
        or github_env.release_flag == 1 else 0
    write_github_env_file(github_env, os.environ.get('GITHUB_OUTPUT'))


def write_version_number(file_path: str, env_details):
    # app_name="${TARGET_BUILD_NAME}.${DEPTH}.dev-$(git log --pretty=format:'%h' --max-count=1).${branch_name}"
    # echo "${app_name}">version.txt
    try:
        version: str = f'{env_details.TARGET_BUILD_NAME}.{env_details.DEPTH}.{env_details.major}.{env_details.BUILD_NUMBER}.{env_details.branch_name}'
        with open(file_path, "w") as version_file:
            version_file.write(version)
    except Exception as ex:
        Logger.error(f'Unable to set version string in file {file_path}: {ex}')
        raise Exception('Version error')
    Logger.notice(f'Firmware version set to {version}')

def handle_environment(args):
    set_workdir(args)
    print('Setting environment variables...')
    commit_message: str = Releases.get_last_commit_message()
    last: Commit = Releases.get_last_commit()
    if last is not None:
        github_env.author_name = last.author.name
        github_env.author_email = last.author.email
        github_env.committer_name = last.committer.name
        github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = Releases.get_branch_name()
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name(
        'squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{args.build}'
    github_env.description = Releases.get_commit_list_descriptions()
    write_github_env_file(github_env, args.env_file)
    write_version_number("version.txt", github_env)

def handle_artifacts(args):
    set_workdir(args)
    print('Handling artifacts')
    for attr in artifacts_formats:
        target: str = os.path.relpath(attr[1].replace(artifacts_formats_outdir, args.outdir).replace(
            artifacts_formats_prefix, format_artifact_name()))
        source: str = os.path.relpath(attr[0])
        target_dir: str = os.path.dirname(target)
        print(f'Copying file {source} to {target}')
        try:
            os.makedirs(target_dir, exist_ok=True)
            shutil.copyfile(source, target, follow_symlinks=True)
        except Exception as ex:
            Logger.error(f"Error while copying {source} to {target}\nContent of {target_dir}:\n{Logger.NEWLINE_CHAR.join(get_file_list(os.path.dirname(attr[0].rstrip())))}")
            raise

def delete_folder(path):
    '''Remove read-only files, then delete the folder recursively'''
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            fulldirpath = os.path.join(root, dir)
            Logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            Logger.debug(f'Setting file read/write {full_path}')
            os.chmod(full_path, stat.S_IWRITE)
            Logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        Logger.debug(f'Changing folder read/write {path}')
        os.chmod(path, stat.S_IWRITE)
        print(f'WARNING: Deleting Folder {path}')
        os.rmdir(path)

def get_file_stats(path):
    fstat: os.stat_result = pathlib.Path(path).stat()
    # Convert file size to MB, KB or Bytes
    mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
    if fstat.st_size > 1024 * 1024:
        return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
    elif fstat.st_size > 1024:
        return math.ceil(fstat.st_size / 1024), "KB", mtime
    return fstat.st_size, "B", mtime


def get_file_list(root_path, max_levels: int = 2) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(root_path):
        path = os.path.relpath(root).split(os.sep)
        if len(path) <= max_levels:
            outlist.append(f'\n{root}')
            for file in files:
                full_name = os.path.join(root, file)
                fsize, unit, mtime = get_file_stats(full_name)
                outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(
                    len(path) * "---", fsize, unit, mtime, file))
    return outlist


def get_recursive_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for fname in files:
            outlist.append((fname, os.path.join(root, fname)))
    return outlist

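# Builds one web installer manifest per release (chip family, parts and flash
# offsets) and a main manifest that lists every published release.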
def handle_manifest(args):
    set_workdir(args)
    print('Creating the web installer manifest')
    outdir: str = os.path.relpath(args.outdir)
    if not os.path.exists(outdir):
        print(f'Creating target folder {outdir}')
        os.makedirs(outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(
        args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        manifest_name = release.get_manifest_name()
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = manifest_name
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        Logger.debug(f'Generated manifest: \n{json.dumps(man)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        print(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        release.cleanup()
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    print(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)

def get_new_file_names(manif_json) -> collections.OrderedDict:
    new_release_files: dict = collections.OrderedDict()
    for artifact in manif_json:
        for name in [f["name"] for f in artifact["bin_files"]]:
            new_release_files[name] = artifact
        new_release_files[artifact["manifest_name"]] = artifact["name"]
    return new_release_files


def copy_no_overwrite(source: str, target: str):
    sfiles = os.listdir(source)
    for f in sfiles:
        source_file = os.path.join(source, f)
        target_file = os.path.join(target, f)
        if not os.path.exists(target_file):
            print(f'Copying {f} to target')
            shutil.copy(source_file, target_file)
        else:
            Logger.debug(f'Skipping existing file {f}')

def get_changed_items(repo: Repository) -> Dict:
    changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
    original_status_dict: Dict[str, int] = repo.status()
    # transfer any non-filemode changes to a new dictionary
    status_dict: Dict[str, int] = {}
    for filename, code in original_status_dict.items():
        if code != changed_filemode_status_code:
            status_dict[filename] = code
    return status_dict


def is_dirty(repo: Repository) -> bool:
    return len(get_changed_items(repo)) > 0


def push_with_method(auth_method: str, token: str, remote: Remote, reference):
    success: bool = False
    try:
        remote.push(reference, callbacks=RemoteCallbacks(pygit2.UserPass(auth_method, token)))
        success = True
    except Exception as ex:
        Logger.error(f'Error pushing with auth method {auth_method}: {ex}.')
    return success

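# Commits any changes in the installer repository and pushes them, trying the
# supported token authentication methods in turn.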
def push_if_change(repo: Repository, token: str, source_path: str, manif_json):
    if is_dirty(repo):
        print('Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        message = f'Web installer for {format_artifact_from_manifest(manif_json)}'
        tree = index.write_tree()
        Releases.load_repository(source_path)
        commit = repo.create_commit(reference, Releases.get_last_author(
        ), Releases.get_last_committer(), message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        print(
            f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        remote: Remote = repo.remotes['origin']
        auth_methods = ['x-access-token', 'x-oauth-basic']
        for method in auth_methods:
            if push_with_method(method, token, remote, [reference]):
                print(f'::notice Web installer updated for {format_artifact_from_manifest(manif_json)}')
                return
        raise Exception('Unable to push web installer.')
    else:
        print('WARNING: No change found. Skipping update')

def update_files(target_artifacts: str, manif_json, source: str):
    new_list: dict = get_new_file_names(manif_json)
    if os.path.exists(target_artifacts):
        print(f'Removing obsolete files from {target_artifacts}')
        for entry in get_recursive_list(target_artifacts):
            f = entry[0]
            full_target = entry[1]
            if f not in new_list.keys():
                print(f'WARNING: Removing obsolete file {f}')
                os.remove(full_target)
    else:
        print(f'Creating target folder {target_artifacts}')
        os.makedirs(target_artifacts, exist_ok=True)
    print(f'Copying installer files to {target_artifacts}:')
    copy_no_overwrite(os.path.abspath(source), target_artifacts)

def handle_pushinstaller(args):
    set_workdir(args)
    print('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        print(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    print(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    manif_json = parse_json(os.path.join(args.source, args.manif_name))
    update_files(target_artifacts, manif_json, args.source)
    push_if_change(repo, args.token, args.cwd, manif_json)
    repo.state_cleanup()

def handle_show(args):
    print('Show')


def extract_files_from_archive(url):
    tempfolder = tempfile.mkdtemp()
    platform: Response = requests.get(url)
    Logger.debug(f'Downloading {url} to {tempfolder}')
    Logger.debug(f'Transfer status code: {platform.status_code}. Expanding content')
    z = zipfile.ZipFile(io.BytesIO(platform.content))
    z.extractall(tempfolder)
    return tempfolder


def handle_list_files(args):
    print(f'Content of {args.cwd}:')
    print(Logger.NEWLINE_CHAR.join(get_file_list(args.cwd)))


def handle_commits(args):
    set_workdir(args)
    print(Releases.get_commit_list_descriptions())

parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')
parser_commits.set_defaults(func=handle_commits, cmd='list_commits')

def main():
    exit_result_code = 0
    args = parser.parse_args()
    Logger.with_crlf = args.with_crlf
    print(f'::group::{args.command}')
    print(f'build_tools version : {tool_version}')
    print(f'Processing command {args.command}')
    func: Callable = getattr(args, 'func', None)
    if func is not None:
        # Call whatever subcommand function was selected
        e: Exception
        try:
            func(args)
        except Exception as e:
            # pass exception info positionally so this works on Python 3.10+ as well
            Logger.error(f'Critical error while running {args.command}\n{" ".join(traceback.format_exception(type(e), e, e.__traceback__))}')
            exit_result_code = 1
    else:
        # No subcommand was provided, so call help
        parser.print_usage()
    print('::endgroup::')
    sys.exit(exit_result_code)


if __name__ == '__main__':
    main()