#!/usr/bin/env python
from json import JSONDecodeError
import math
import pathlib
import time
from typing import Callable, Dict, Union
import pkg_resources
import sys
import os
import io
from os import stat_result, walk
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import logging
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    # Dump the interpreter, installed packages and environment to help diagnose import failures in CI
    print(f'::error Failed importing module {ex.name}, using interpreter {sys.executable}. \n Installed packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(["%s==%s" % (i.key, i.version) for i in installed_packages])
    print('\n'.join(installed_packages_list))
    print('Environment: ')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise
tool_version = "1.0.6"
FORMAT = '%(asctime)s %(message)s'
logging.basicConfig(format=FORMAT)
logger: logging.Logger = logging.getLogger(__name__)
github_env = type('', (), {})()
manifest = {
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": [
            ]
        }
    ]
}
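# The dictionary above is the per-release manifest template that handle_manifest() fills in:
# "name"/"version" are set from each GitHub release and "parts" receives {"path", "offset"}
# entries for every binary. The layout appears to follow the ESP Web Tools manifest schema
# (chipFamily, parts[]); that is an assumption based on the fields used here rather than
# something stated in this file.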
artifacts_formats_outdir = '$OUTDIR'
artifacts_formats_prefix = '$PREFIX'
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin', '$OUTDIR/$PREFIX-partition-table.bin'],
]
class AttributeDict(dict):
    __slots__ = ()

    def __getattr__(self, name: str):
        try:
            return self[name.upper()]
        except Exception:
            try:
                return self[name.lower()]
            except Exception:
                for attr in self.keys():
                    if name.lower() == attr.replace("'", "").lower():
                        return self[attr]
    __setattr__ = dict.__setitem__
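# AttributeDict is used throughout this script to wrap JSON payloads (GitHub API responses,
# os.environ) so keys can be read as attributes, matched case-insensitively.
# A minimal illustration with made-up keys:
#     release = AttributeDict({'tag_name': 'v1', 'BRANCH_NAME': 'main'})
#     release.tag_name     # -> 'v1'
#     release.branch_name  # -> 'main' (falls back to the upper-case key)
# Note that misses return None rather than raising AttributeError.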
  91. parser = argparse.ArgumentParser(description='Handles some parts of the squeezelite-esp32 build process')
  92. parser.add_argument('--cwd', type=str,help='Working directory', default=os.getcwd())
  93. parser.add_argument('--loglevel', type=str,choices={'CRITICAL','ERROR','WARNING','INFO','DEBUG','NOTSET'}, help='Logging level', default='INFO')
  94. subparsers = parser.add_subparsers( dest='command', required=True)
  95. parser_dir = subparsers.add_parser("list_files",
  96. add_help=False,
  97. description="List Files parser",
  98. help="Display the content of the folder")
  99. parser_manifest = subparsers.add_parser("manifest",
  100. add_help=False,
  101. description="Manifest parser",
  102. help="Handles the web installer manifest creation")
  103. parser_manifest.add_argument('--flash_file', required=True, type=str,help='The file path which contains the firmware flashing definition')
  104. parser_manifest.add_argument('--max_count', type=int,help='The maximum number of releases to keep', default=3)
  105. parser_manifest.add_argument('--manif_name', required=True,type=str,help='Manifest files name and prefix')
  106. parser_manifest.add_argument('--outdir', required=True,type=str,help='Output directory for files and manifests')
  107. parser_artifacts = subparsers.add_parser("artifacts",
  108. add_help=False,
  109. description="Artifacts parser",
  110. help="Handles the creation of artifacts files")
  111. parser_artifacts.add_argument('--outdir', type=str,help='Output directory for artifact files', default='./artifacts/')
  112. parser_pushinstaller = subparsers.add_parser("pushinstaller",
  113. add_help=False,
  114. description="Web Installer Checkout parser",
  115. help="Handles the creation of artifacts files")
  116. parser_pushinstaller.add_argument('--target', type=str,help='Output directory for web installer repository', default='./web_installer/')
  117. parser_pushinstaller.add_argument('--artifacts', type=str,help='Target subdirectory for web installer artifacts', default='./web_installer/')
  118. parser_pushinstaller.add_argument('--source', type=str,help='Source directory for the installer artifacts', default='./web_installer/')
  119. parser_pushinstaller.add_argument('--url', type=str,help='Web Installer clone url ', default='https://github.com/sle118/squeezelite-esp32-installer.git')
  120. parser_pushinstaller.add_argument('--web_installer_branch', type=str,help='Web Installer branch to use ', default='main')
  121. parser_pushinstaller.add_argument('--token', type=str,help='Auth token for pushing changes')
  122. parser_pushinstaller.add_argument('--flash_file', type=str,help='Manifest json file path')
  123. parser_pushinstaller.add_argument('--manif_name', required=True,type=str,help='Manifest files name and prefix')
  124. parser_environment = subparsers.add_parser("environment",
  125. add_help=False,
  126. description="Environment parser",
  127. help="Updates the build environment")
  128. parser_environment.add_argument('--env_file', type=str,help='Environment File', default=os.environ.get('GITHUB_ENV'))
  129. parser_environment.add_argument('--build', required=True, type=int,help='The build number')
  130. parser_environment.add_argument('--node', required=True, type=str,help='The matrix node being built')
  131. parser_environment.add_argument('--depth', required=True, type=int,help='The bit depth being built')
  132. parser_environment.add_argument('--major', type=str,help='Major version', default='2')
  133. parser_environment.add_argument('--docker', type=str,help='Docker image to use',default='sle118/squeezelite-esp32-idfv43')
  134. parser_show = subparsers.add_parser("show",
  135. add_help=False,
  136. description="Show parser",
  137. help="Show the build environment")
  138. parser_build_flags = subparsers.add_parser("build_flags",
  139. add_help=False,
  140. description="Build Flags",
  141. help="Updates the build environment with build flags")
  142. parser_build_flags.add_argument('--mock', action='store_true',help='Mock release')
  143. parser_build_flags.add_argument('--force', action='store_true',help='Force a release build')
  144. parser_build_flags.add_argument('--ui_build', action='store_true',help='Include building the web UI')
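# The subcommands defined above are what the CI workflow is expected to call. Illustrative
# invocations (argument values are placeholders, not taken from this file):
#     build_tools.py environment --build 1500 --node I2S-4MFlash --depth 16 --major 2
#     build_tools.py build_flags --ui_build
#     build_tools.py artifacts --outdir ./artifacts/
#     build_tools.py manifest --flash_file build/flasher_args.json --manif_name artifacts.json --outdir ./web/
#     build_tools.py pushinstaller --target ./web_installer/ --manif_name artifacts.json --token <token>
#     build_tools.py list_files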
def format_commit(commit):
    # 463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(timedelta(minutes=commit.author.offset)))
    timestr = dt.strftime('%c%z')
    cmesg = commit.message.replace('\n', ' ')
    # collapse doubled spaces left behind by the newline replacement
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace('  ', ' ')


def get_github_data(repo: Repository, api):
    # Derive the "owner/repo" path from the origin remote URL and call the GitHub REST API
    base_url = urlparse(repo.remotes['origin'].url)
    url = f"https://api.github.com/repos{base_url.path.split('.')[-2]}/{api}"
    resp = requests.get(url, headers={"Content-Type": "application/vnd.github.v3+json"})
    return json.loads(resp.text)
def dump_directory(dir_path):
    # list to store file names
    res = []
    for (dir_path, dir_names, file_names) in walk(dir_path):
        res.extend(file_names)
    print(res)
class ReleaseDetails():
    version: str
    idf: str
    platform: str
    branch: str
    bitrate: str

    def __init__(self, tag: str) -> None:
        # Release names look like "<version>[-<bitrate>]#<idf>#<platform>#<branch>"
        self.version, self.idf, self.platform, self.branch = tag.split('#')
        self.bitrate = None  # keep get_full_platform()/get_attributes() safe when no bitrate suffix is present
        try:
            self.version, self.bitrate = self.version.split('-')
        except Exception:
            pass

    def get_attributes(self):
        return {
            'version': self.version,
            'idf': self.idf,
            'platform': self.platform,
            'branch': self.branch,
            'bitrate': self.bitrate
        }

    def format_prefix(self) -> str:
        return f'{self.branch}-{self.platform}-{self.version}'

    def get_full_platform(self):
        return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"
class BinFile():
    name: str
    offset: int
    source_full_path: str
    target_name: str
    target_fullpath: str
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{self.name}'.rstrip()

    def get_manifest(self):
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        logger.debug(f'file {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path, self.target_fullpath, follow_symlinks=True)
        except Exception as ex:
            print(f'::error Error while copying {self.source_full_path} to {self.target_fullpath}')
            print(f'::error Content of {os.path.dirname(self.source_full_path.rstrip())}:')
            print('\n::error '.join(get_file_list(os.path.dirname(self.source_full_path.rstrip()))))
            raise
        return self.target_fullpath

    def get_attributes(self):
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }
class PlatformRelease():
    name: str
    description: str
    url: str = ''
    zipfile: str = ''
    tempfolder: str
    release_details: ReleaseDetails
    flash_parms = {}
    build_dir: str
    has_artifacts: bool
    branch: str
    assets: list
    bin_files: list
    name_prefix: str

    def get_manifest_name(self) -> str:
        return f'{self.name_prefix}-{self.release_details.format_prefix()}.json'

    def __init__(self, git_release, flash_parms, build_dir, branch, name_prefix) -> None:
        self.name = git_release.tag_name
        self.description = git_release.body
        self.assets = git_release['assets']
        self.has_artifacts = False
        self.name_prefix = name_prefix
        if len(self.assets) > 0:
            if self.has_asset_type():
                self.url = self.get_asset_from_extension().browser_download_url
            if self.has_asset_type('.zip'):
                self.zipfile = self.get_asset_from_extension(ext='.zip').browser_download_url
                self.has_artifacts = True
        self.release_details = ReleaseDetails(git_release.name)
        self.bin_files = list()
        self.flash_parms = flash_parms
        self.build_dir = build_dir
        self.branch = branch

    def process_files(self, outdir: str) -> list:
        parts = []
        for f in self.bin_files:
            f.copy(outdir)
            parts.append(f.get_manifest())
        # the collected entries feed the "parts" array of the per-release manifest
        return parts
    def get_asset_from_extension(self, ext='.bin'):
        for a in self.assets:
            filename = AttributeDict(a).name
            file_name, file_extension = os.path.splitext(filename)
            if file_extension == ext:
                return AttributeDict(a)
        return None

    def has_asset_type(self, ext='.bin') -> bool:
        return self.get_asset_from_extension(ext) is not None

    def platform(self):
        return self.release_details.get_full_platform()

    def get_zip_file(self):
        self.tempfolder = extract_files_from_archive(self.zipfile)
        logger.info(f'Artifacts for {self.name} extracted to {self.tempfolder}')
        try:
            for artifact in artifacts_formats:
                base_name = os.path.basename(artifact[0]).strip()
                self.bin_files.append(BinFile(self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
            self.has_artifacts = True
        except Exception:
            self.has_artifacts = False

    def cleanup(self):
        logger.info(f'removing {self.name} temp directory {self.tempfolder}')
        shutil.rmtree(self.tempfolder)

    def get_attributes(self):
        return {
            'name': self.name,
            'branch': self.branch,
            'description': self.description,
            'url': self.url,
            'zipfile': self.zipfile,
            'release_details': self.release_details.get_attributes(),
            'bin_files': [b.get_attributes() for b in self.bin_files],
            'manifest_name': self.get_manifest_name()
        }
class Releases():
    _dict: dict = collections.OrderedDict()
    maxcount: int = 0
    branch: str = ''
    repo: Repository = None
    last_commit: Commit = None
    manifest_name: str

    def __init__(self, branch: str, maxcount: int = 3) -> None:
        self.maxcount = maxcount
        self.branch = branch

    def count(self, value: PlatformRelease) -> int:
        content = self._dict.get(value.platform())
        if content is None:
            return 0
        return len(content)

    def get_platform(self, platform: str) -> list:
        return self._dict[platform]

    def get_platform_keys(self):
        return self._dict.keys()

    def get_all(self) -> list:
        result: list = []
        for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
            for release in platform:
                result.append(release)
        return result

    def append(self, value: PlatformRelease):
        # optional processing here
        if self.count(value) == 0:
            self._dict[value.platform()] = []
        if self.should_add(value):
            logger.info(f'Adding release {value.name} to the list')
            self._dict[value.platform()].append(value)
        else:
            logger.info(f'Skipping release {value.name}')

    def get_attributes(self):
        res = []
        release: PlatformRelease
        for release in self.get_all():
            res.append(release.get_attributes())
        return res

    def get_minlen(self) -> int:
        return min([len(self.get_platform(p)) for p in self.get_platform_keys()])

    def got_all_packages(self) -> bool:
        return self.get_minlen() >= self.maxcount

    def should_add(self, release: PlatformRelease) -> bool:
        return self.count(release) <= self.maxcount
    def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
        if self.branch != package.branch:
            logger.info(f'Skipping release {package.name} from branch {package.branch}')
        elif package.has_artifacts or not with_artifacts:
            self.append(package)

    @classmethod
    def get_last_commit_message(cls) -> str:
        last: Commit = cls.get_last_commit()
        if last is None:
            return ''
        else:
            return last.message.replace('\n', ' ')

    @classmethod
    def get_last_commit(cls) -> Commit:
        if cls.repo is None:
            cls.get_repository(os.getcwd())
        head: Reference = cls.repo.head
        target = head.target
        ref: Reference
        if cls.last_commit is None:
            try:
                cls.last_commit = cls.repo[target]
                logger.info(f'Last commit for {head.shorthand} is {format_commit(cls.last_commit)}')
            except Exception as e:
                print(f'::error Unable to retrieve last commit for {head.shorthand}/{target}: {e}')
                cls.last_commit = None
        return cls.last_commit

    @classmethod
    def get_repository(cls, path: str = os.getcwd()) -> Repository:
        if cls.repo is None:
            try:
                logger.info(f'Opening repository from {path}')
                cls.repo = Repository(path=path)
            except GitError as ex:
                print('::error Unable to access the repository.')
                print(f'::error Content of {path}:')
                print('\n::error '.join(get_file_list(path, 1)))
                raise
        return cls.repo

    @classmethod
    def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
        commit: Commit
        reference: Reference
        commit, reference = repo.resolve_refish(commit_id)
        return commit

    @classmethod
    def get_release_branch(cls, repo: Repository, platform_release) -> str:
        match = [t for t in repo.branches.with_commit(platform_release.target_commitish)]
        no_origin = [t for t in match if 'origin' not in t]
        if len(no_origin) == 0 and len(match) > 0:
            return match[0].split('/')[1]
        elif len(no_origin) > 0:
            return no_origin[0]
        return ''

    @classmethod
    def get_flash_parms(cls, file_path):
        flash = parse_json(file_path)
        od: collections.OrderedDict = collections.OrderedDict()
        for z in flash['flash_files'].items():
            base_name: str = os.path.basename(z[1])
            od[base_name.strip()] = literal_eval(z[0])
        return collections.OrderedDict(sorted(od.items()))
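    # get_flash_parms() expects the flasher definition referenced by --flash_file to contain a
    # "flash_files" object mapping flash offsets to binary paths, i.e. something shaped like
    # (illustrative values only):
    #     {"flash_files": {"0x1000": "bootloader/bootloader.bin", "0x10000": "squeezelite.bin"}}
    # which is the layout this script appears to assume ESP-IDF writes to build/flasher_args.json.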
    @classmethod
    def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
        repo = Releases.get_repository(os.getcwd())
        flash_parms = Releases.get_flash_parms(flash_file_path)
        packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
        build_dir = os.path.dirname(flash_file_path)
        for page in range(1, 999):
            logger.debug(f'Getting releases page {page}')
            releases = get_github_data(repo, f'releases?per_page=50&page={page}')
            if len(releases) == 0:
                logger.debug(f'No more release found for page {page}')
                break
            for release_entry in [AttributeDict(platform) for platform in releases]:
                packages.add_package(PlatformRelease(release_entry, flash_parms, build_dir, Releases.get_release_branch(repo, release_entry), name_prefix))
                if packages.got_all_packages():
                    break
            if packages.got_all_packages():
                break
        return packages
    @classmethod
    def get_commit_list(cls) -> list:
        commit_list = []
        last: Commit = Releases.get_last_commit()
        if last is None:
            return commit_list
        try:
            for c in Releases.get_repository().walk(last.id, pygit2.GIT_SORT_TIME):
                commit_list.append(format_commit(c))
                if len(commit_list) > 10:
                    break
        except Exception as e:
            print(f'::error Unable to get commit list starting at {last.id}: {e}')
        return commit_list

    @classmethod
    def get_commit_list_descriptions(cls) -> str:
        # rendered as a multi-line value ("description<<~EOD ... ~EOD") when written to the env file
        return '<<~EOD\n### Revision Log<br>\n' + '<br>\n'.join(cls.get_commit_list()) + '\n~EOD'

    def update(self, *args, **kwargs):
        if args:
            if len(args) > 1:
                raise TypeError("update expected at most 1 argument, "
                                "got %d" % len(args))
            other = dict(args[0])
            for key in other:
                self[key] = other[key]
        for key in kwargs:
            self[key] = kwargs[key]

    def setdefault(self, key, value=None):
        if key not in self:
            self[key] = value
        return self[key]
def set_workdir(args):
    logger.info(f'setting work dir to: {args.cwd}')
    os.chdir(os.path.abspath(args.cwd))


def parse_json(filename: str):
    fname = os.path.abspath(filename)
    folder: str = os.path.abspath(os.path.dirname(filename))
    logger.info(f'Opening json file {fname} from {folder}')
    try:
        with open(fname) as f:
            content = f.read()
            logger.debug(f'Loading json\n{content}')
            return json.loads(content)
    except JSONDecodeError as ex:
        print(f'::error Error parsing {content}')
    except Exception as ex:
        print(f'::error Unable to parse flasher args json file. Content of {folder}:')
        print('\n::error '.join(get_file_list(folder)))
        raise
def write_github_env(args):
    logger.info(f'Writing environment details to {args.env_file}...')
    with open(args.env_file, "w") as env_file:
        for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
            line = f'{attr}{"=" if attr != "description" else ""}{getattr(github_env, attr)}'
            print(line)
            env_file.write(f'{line}\n')
            os.environ[attr] = str(getattr(github_env, attr))
    logger.info(f'Done writing environment details to {args.env_file}!')


def set_workflow_output(args):
    logger.info('Outputting job variables ...')
    for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
        # use print instead of logger, as we need the raw output without the date/time prefix from logging
        print(f'::set-output name={attr}::{getattr(github_env, attr)}')
        os.environ[attr] = str(getattr(github_env, attr))
    logger.info('Done outputting job variables!')


def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
    return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'
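# With the values handle_environment() publishes, format_artifact_name() yields names such as
# "squeezelite-esp32-master-I2S-4MFlash-16-21500" (branch, node, depth, major and build values
# here are placeholders, not taken from this file).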
def handle_build_flags(args):
    set_workdir(args)
    logger.info('Setting global build flags')
    last: Commit = Releases.get_last_commit()
    commit_message: str = Releases.get_last_commit_message()
    github_env.mock = 1 if args.mock else 0
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag == 1 else 0
    set_workflow_output(github_env)


def handle_environment(args):
    set_workdir(args)
    logger.info('Setting environment variables...')
    commit_message: str = Releases.get_last_commit_message()
    last: Commit = Releases.get_last_commit()
    if last is not None:
        github_env.author_name = last.author.name
        github_env.author_email = last.author.email
        github_env.committer_name = last.committer.name
        github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = re.sub(r'[^a-zA-Z0-9\-~!@_\.]', '', Releases.get_repository().head.shorthand)
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name('squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{args.build}'
    github_env.description = Releases.get_commit_list_descriptions()
    write_github_env(args)
def handle_artifacts(args):
    set_workdir(args)
    logger.info('Handling artifacts')
    for attr in artifacts_formats:
        target: str = os.path.relpath(attr[1].replace(artifacts_formats_outdir, args.outdir).replace(artifacts_formats_prefix, format_artifact_name()))
        source: str = os.path.relpath(attr[0])
        target_dir: str = os.path.dirname(target)
        logger.info(f'Copying file {source} to {target}')
        try:
            os.makedirs(target_dir, exist_ok=True)
            shutil.copyfile(source, target, follow_symlinks=True)
        except Exception as ex:
            print(f'::error Error while copying {source} to {target}')
            print(f'::error Content of {target_dir}:')
            print('\n::error '.join(get_file_list(os.path.dirname(attr[0].rstrip()))))
            raise
def delete_folder(path):
    '''Recursively delete a folder, clearing the read-only flag on files first'''
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            fulldirpath = os.path.join(root, dir)
            logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            logger.debug(f'Setting file read/write {full_path}')
            os.chmod(full_path, stat.S_IWRITE)
            logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        logger.debug(f'Changing folder read/write {path}')
        os.chmod(path, stat.S_IWRITE)
        logger.warning(f'Deleting Folder {path}')
        os.rmdir(path)
def get_file_stats(path):
    fstat: os.stat_result = pathlib.Path(path).stat()
    # Convert file size to MB, KB or Bytes
    mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
    if fstat.st_size > 1024 * 1024:
        return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
    elif fstat.st_size > 1024:
        return math.ceil(fstat.st_size / 1024), "KB", mtime
    return fstat.st_size, "B", mtime


def get_file_list(root_path, max_levels: int = 2) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(root_path):
        path = root.split(os.sep)
        if len(path) <= max_levels:
            outlist.append(f'\n{root}')
            for file in files:
                full_name = os.path.join(root, file)
                fsize, unit, mtime = get_file_stats(full_name)
                outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(len(path) * "---", fsize, unit, mtime, file))
    return outlist
def get_recursive_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for fname in files:
            outlist.append((fname, os.path.join(root, fname)))
    return outlist
def handle_manifest(args):
    set_workdir(args)
    logger.info('Creating the web installer manifest')
    env = AttributeDict(os.environ)
    if not os.path.exists(os.path.dirname(args.outdir)):
        logger.info(f'Creating target folder {args.outdir}')
        os.makedirs(args.outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = release.get_manifest_name()
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        logger.debug(f'Generated manifest: \n{json.dumps(man, indent=4)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        logger.info(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        release.cleanup()
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    logger.info(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)
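# For illustration, a per-release manifest written by handle_manifest() would look roughly like
# the following (names and offsets are hypothetical; the offsets actually come from the flasher
# definition file):
#     {
#         "name": "I2S-4MFlash-16",
#         "version": "2.1500",
#         "home_assistant_domain": "slim_player",
#         "funding_url": "https://esphome.io/guides/supporters.html",
#         "manifest_name": "...json",
#         "builds": [{"chipFamily": "ESP32",
#                     "parts": [{"path": "master-I2S-4MFlash-2.1500-bootloader.bin", "offset": 4096}]}]
#     }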
def get_new_file_names(manifest: str, source: str) -> collections.OrderedDict:
    artifacts = parse_json(os.path.join(source, manifest))
    new_release_files: dict = collections.OrderedDict()
    for artifact in artifacts:
        for name in [f["name"] for f in artifact["bin_files"]]:
            new_release_files[name] = artifact
        new_release_files[artifact['manifest_name']] = artifact['name']
    return new_release_files
def copy_no_overwrite(source: str, target: str):
    sfiles = os.listdir(source)
    for f in sfiles:
        source_file = os.path.join(source, f)
        target_file = os.path.join(target, f)
        if not os.path.exists(target_file):
            logger.info(f'Copying {f} to target')
            shutil.copy(source_file, target_file)
        else:
            logger.debug(f'Skipping existing file {f}')
def get_changed_items(repo: Repository) -> Dict:
    changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
    original_status_dict: Dict[str, int] = repo.status()
    # transfer any non-filemode changes to a new dictionary
    status_dict: Dict[str, int] = {}
    for filename, code in original_status_dict.items():
        if code != changed_filemode_status_code:
            status_dict[filename] = code
    return status_dict


def is_dirty(repo: Repository) -> bool:
    return len(get_changed_items(repo)) > 0
def push_if_change(repo: Repository, token: str):
    if is_dirty(repo):
        logger.info('Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        author = Signature(env.author_name, env.author_email)
        committer = Signature(env.committer_name, env.committer_email)
        message = f'Web installer for {format_artifact_name()}'
        tree = index.write_tree()
        commit = repo.create_commit(reference, author, committer, message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        logger.info(f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        credentials = UserPass(token, 'x-oauth-basic')  # passing credentials
        remote: Remote = repo.remotes['origin']
        remote.credentials = credentials
        remote.push([reference], callbacks=RemoteCallbacks(UserPass(token, 'x-oauth-basic')))
    else:
        logger.warning('No change found. Skipping update')
def update_files(target_artifacts: str, manif_name: str, source: str):
    new_list: dict = get_new_file_names(manif_name, os.path.abspath(source))
    if os.path.exists(target_artifacts):
        logger.info(f'Removing obsolete files from {target_artifacts}')
        for entry in get_recursive_list(target_artifacts):
            f = entry[0]
            full_target = entry[1]
            if f not in new_list.keys():
                logger.warning(f'Removing obsolete file {f}')
                os.remove(full_target)
    else:
        logger.info(f'Creating target folder {target_artifacts}')
        os.makedirs(target_artifacts, exist_ok=True)
    logger.info(f'Copying installer files to {target_artifacts}:')
    copy_no_overwrite(os.path.abspath(source), target_artifacts)
def handle_pushinstaller(args):
    set_workdir(args)
    logger.info('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        logger.info(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    logger.info(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    update_files(target_artifacts, args.manif_name, args.source)
    push_if_change(repo, args.token)
    repo.state_cleanup()


def handle_show(args):
    logger.info('Show')


def extract_files_from_archive(url):
    # download the release zip into memory and unpack it into a temporary folder
    tempfolder = tempfile.mkdtemp()
    platform = requests.get(url)
    z = zipfile.ZipFile(io.BytesIO(platform.content))
    z.extractall(tempfolder)
    return tempfolder


def handle_list_files(args):
    print(f'Content of {args.cwd}:')
    print('\n'.join(get_file_list(args.cwd)))
parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_artifacts.set_defaults(func=handle_artifacts, cmd='artifacts')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')


def main():
    args = parser.parse_args()
    logger.setLevel(logging.getLevelName(args.loglevel))
    logger.info(f'build_tools version : {tool_version}')
    logger.debug(f'Processing command {args.command}')
    func: Callable = getattr(args, 'func', None)
    if func is not None:
        # Call whatever subcommand function was selected
        func(args)
    else:
        # No subcommand was provided, so call help
        parser.print_usage()


if __name__ == '__main__':
    main()