build_tools.py 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745
#!/usr/bin/env python
# Build helper for squeezelite-esp32: manifest generation, artifact handling,
# environment setup and web-installer publication for the CI workflow.
from json import JSONDecodeError
import math
import pathlib
import time
from typing import Callable,Dict, Union
import pkg_resources
import sys
import os
import io
from os import stat_result, walk
# Imports are wrapped so that a missing module produces an actionable CI error
# (::error:: annotations) listing the interpreter, packages and environment.
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import logging
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit,Repository,GitError,Reference,UserPass,Index,Signature,RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    # Dump full diagnostics so the failing CI job is self-explanatory
    print(f'::error::Failed importing module {ex.name}, using interpreter {sys.executable}. \n Installed packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(["%s==%s" % (i.key, i.version) for i in installed_packages])
    print('\n'.join(installed_packages_list))
    print(f'Environment: ')
    envlist="\n".join( [f"{k}={v}" for k,v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise
tool_version= "1.0.6"
FORMAT = '%(asctime)s %(message)s'
logging.basicConfig(format=FORMAT)
logger:logging.Logger = logging.getLogger(__name__)
# Anonymous attribute bag; handlers attach build variables to it dynamically
github_env= type('', (), {})()
# Template for the ESP Web Tools installer manifest; 'parts' is filled per release
manifest={
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": [
            ]
        }
    ]
}
# Placeholders substituted in the target names below
artifacts_formats_outdir= '$OUTDIR'
artifacts_formats_prefix= '$PREFIX'
# [source path in build tree, target name template]
# NOTE(review): the trailing space in 'partition-table.bin ' looks accidental,
# but use sites call .rstrip() before touching the filesystem — confirm before
# removing it.
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin ', '$OUTDIR/$PREFIX-partition-table.bin'],
]
  78. class AttributeDict(dict):
  79. __slots__ = ()
  80. def __getattr__(self, name:str):
  81. try:
  82. return self[name.upper()]
  83. except Exception:
  84. try:
  85. return self[name.lower()]
  86. except Exception:
  87. for attr in self.keys():
  88. if name.lower() == attr.replace("'","").lower() :
  89. return self[attr]
  90. __setattr__ = dict.__setitem__
# ---- Command-line interface -------------------------------------------------
# Global options, then one sub-command per build step.
parser = argparse.ArgumentParser(description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str,help='Working directory', default=os.getcwd())
parser.add_argument('--loglevel', type=str,choices={'CRITICAL','ERROR','WARNING','INFO','DEBUG','NOTSET'}, help='Logging level', default='INFO')
subparsers = parser.add_subparsers( dest='command', required=True)
# 'list_files': debug helper that dumps the working directory content
parser_dir = subparsers.add_parser("list_files",
    add_help=False,
    description="List Files parser",
    help="Display the content of the folder")
# 'manifest': builds the web-installer manifest files from GitHub releases
parser_manifest = subparsers.add_parser("manifest",
    add_help=False,
    description="Manifest parser",
    help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str,help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument('--max_count', type=int,help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument('--manif_name', required=True,type=str,help='Manifest files name and prefix')
parser_manifest.add_argument('--outdir', required=True,type=str,help='Output directory for files and manifests')
# 'artifacts': copies build outputs into the artifacts folder
parser_artifacts = subparsers.add_parser("artifacts",
    add_help=False,
    description="Artifacts parser",
    help="Handles the creation of artifacts files")
parser_artifacts.add_argument('--outdir', type=str,help='Output directory for artifact files', default='./artifacts/')
# 'pushinstaller': clones the web-installer repo, refreshes it and pushes
parser_pushinstaller = subparsers.add_parser("pushinstaller",
    add_help=False,
    description="Web Installer Checkout parser",
    help="Handles the creation of artifacts files")
parser_pushinstaller.add_argument('--target', type=str,help='Output directory for web installer repository', default='./web_installer/')
parser_pushinstaller.add_argument('--artifacts', type=str,help='Target subdirectory for web installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--source', type=str,help='Source directory for the installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--url', type=str,help='Web Installer clone url ', default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument('--web_installer_branch', type=str,help='Web Installer branch to use ', default='main')
parser_pushinstaller.add_argument('--token', type=str,help='Auth token for pushing changes')
parser_pushinstaller.add_argument('--flash_file', type=str,help='Manifest json file path')
parser_pushinstaller.add_argument('--manif_name', required=True,type=str,help='Manifest files name and prefix')
# 'environment': writes build metadata to the GitHub Actions env file
parser_environment = subparsers.add_parser("environment",
    add_help=False,
    description="Environment parser",
    help="Updates the build environment")
parser_environment.add_argument('--env_file', type=str,help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument('--build', required=True, type=int,help='The build number')
parser_environment.add_argument('--node', required=True, type=str,help='The matrix node being built')
parser_environment.add_argument('--depth', required=True, type=int,help='The bit depth being built')
parser_environment.add_argument('--major', type=str,help='Major version', default='2')
parser_environment.add_argument('--docker', type=str,help='Docker image to use',default='sle118/squeezelite-esp32-idfv43')
# 'show': placeholder command
parser_show = subparsers.add_parser("show",
    add_help=False,
    description="Show parser",
    help="Show the build environment")
# 'build_flags': derives release/ui-build flags from the last commit message
parser_build_flags = subparsers.add_parser("build_flags",
    add_help=False,
    description="Build Flags",
    help="Updates the build environment with build flags")
parser_build_flags.add_argument('--mock', action='store_true',help='Mock release')
parser_build_flags.add_argument('--force', action='store_true',help='Force a release build')
parser_build_flags.add_argument('--ui_build', action='store_true',help='Include building the web UI')
  145. def get_github_data(repo:Repository,api):
  146. base_url = urlparse(repo.remotes['origin'].url)
  147. url = f"https://api.github.com/repos{base_url.path.split('.')[-2]}/{api}"
  148. resp= requests.get(url, headers={"Content-Type": "application/vnd.github.v3+json"})
  149. return json.loads(resp.text)
  150. def dump_directory(dir_path):
  151. # list to store files name
  152. res = []
  153. for (dir_path, dir_names, file_names) in walk(dir_path):
  154. res.extend(file_names)
  155. print(res)
  156. class ReleaseDetails():
  157. version:str
  158. idf:str
  159. platform:str
  160. branch:str
  161. bitrate:str
  162. def __init__(self,tag:str) -> None:
  163. self.version,self.idf,self.platform,self.branch=tag.split('#')
  164. try:
  165. self.version,self.bitrate = self.version.split('-')
  166. except Exception:
  167. pass
  168. def get_attributes(self):
  169. return {
  170. 'version': self.version,
  171. 'idf': self.idf,
  172. 'platform': self.platform,
  173. 'branch': self.branch,
  174. 'bitrate': self.bitrate
  175. }
  176. def format_prefix(self)->str:
  177. return f'{self.branch}-{self.platform}-{self.version}'
  178. def get_full_platform(self):
  179. return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"
  180. class BinFile():
  181. name:str
  182. offset:int
  183. source_full_path:str
  184. target_name:str
  185. target_fullpath:str
  186. artifact_relpath:str
  187. def __init__(self, source_path,file_build_path:str, offset:int,release_details:ReleaseDetails,build_dir) -> None:
  188. self.name = os.path.basename(file_build_path).rstrip()
  189. self.artifact_relpath = os.path.relpath(file_build_path,build_dir).rstrip()
  190. self.source_path = source_path
  191. self.source_full_path = os.path.join(source_path,file_build_path).rstrip()
  192. self.offset = offset
  193. self.target_name= f'{release_details.format_prefix()}-{self.name}'.rstrip()
  194. def get_manifest(self):
  195. return { "path": self.target_name , "offset": self.offset }
  196. def copy(self,target_folder)->str:
  197. self.target_fullpath=os.path.join(target_folder,self.target_name)
  198. logger.debug(f'file {self.source_full_path} will be copied to {self.target_fullpath}')
  199. try:
  200. os.makedirs(target_folder, exist_ok=True)
  201. shutil.copyfile(self.source_full_path, self.target_fullpath, follow_symlinks=True)
  202. except Exception as ex:
  203. print(f'::error::Error while copying {self.source_full_path} to {self.target_fullpath}' )
  204. print(f'::error::Content of {os.path.dirname(self.source_full_path.rstrip())}:')
  205. print('\n::error::'.join(get_file_list(os.path.dirname(self.source_full_path.rstrip()))))
  206. raise
  207. return self.target_fullpath
  208. def get_attributes(self):
  209. return {
  210. 'name':self.target_name,
  211. 'offset':self.offset,
  212. 'artifact_relpath':self.artifact_relpath
  213. }
  214. class PlatformRelease():
  215. name:str
  216. description:str
  217. url:str=''
  218. zipfile:str=''
  219. tempfolder:str
  220. release_details:ReleaseDetails
  221. flash_parms={}
  222. build_dir:str
  223. has_artifacts:bool
  224. branch:str
  225. assets:list
  226. bin_files:list
  227. name_prefix:str
  228. def get_manifest_name(self)->str:
  229. return f'{self.name_prefix}-{self.release_details.format_prefix()}.json'
  230. def __init__(self,git_release,flash_parms,build_dir, branch,name_prefix) -> None:
  231. self.name = git_release.tag_name
  232. self.description=git_release.body
  233. self.assets = git_release['assets']
  234. self.has_artifacts = False
  235. self.name_prefix = name_prefix
  236. if len(self.assets)>0:
  237. if self.has_asset_type():
  238. self.url=self.get_asset_from_extension().browser_download_url
  239. if self.has_asset_type('.zip'):
  240. self.zipfile=self.get_asset_from_extension(ext='.zip').browser_download_url
  241. self.has_artifacts = True
  242. self.release_details=ReleaseDetails(git_release.name)
  243. self.bin_files = list()
  244. self.flash_parms = flash_parms
  245. self.build_dir = build_dir
  246. self.branch = branch
  247. def process_files(self,outdir:str)->list:
  248. parts = []
  249. for f in self.bin_files:
  250. f.copy(outdir)
  251. parts.append(f.get_manifest())
  252. def get_asset_from_extension(self,ext='.bin'):
  253. for a in self.assets:
  254. filename=AttributeDict(a).name
  255. file_name, file_extension = os.path.splitext(filename)
  256. if file_extension == ext:
  257. return AttributeDict(a)
  258. return None
  259. def has_asset_type(self,ext='.bin')->bool:
  260. return self.get_asset_from_extension(ext) is not None
  261. def platform(self):
  262. return self.release_details.get_full_platform()
  263. def get_zip_file(self):
  264. self.tempfolder = extract_files_from_archive(self.zipfile)
  265. logger.info(f'Artifacts for {self.name} extracted to {self.tempfolder}')
  266. try:
  267. for artifact in artifacts_formats:
  268. base_name = os.path.basename(artifact[0]).rstrip().lstrip()
  269. self.bin_files.append(BinFile(self.tempfolder,artifact[0],self.flash_parms[base_name],self.release_details,self.build_dir))
  270. has_artifacts = True
  271. except Exception:
  272. self.has_artifacts = False
  273. def cleanup(self):
  274. logger.info(f'removing {self.name} temp directory {self.tempfolder}')
  275. shutil.rmtree(self.tempfolder)
  276. def get_attributes(self):
  277. return {
  278. 'name':self.name,
  279. 'branch':self.branch,
  280. 'description':self.description,
  281. 'url':self.url,
  282. 'zipfile':self.zipfile,
  283. 'release_details':self.release_details.get_attributes(),
  284. 'bin_files': [b.get_attributes() for b in self.bin_files],
  285. 'manifest_name': self.get_manifest_name()
  286. }
  287. class Releases():
  288. _dict:dict = collections.OrderedDict()
  289. maxcount:int =0
  290. branch:str=''
  291. repo:Repository=None
  292. manifest_name:str
  293. def __init__(self,branch:str,maxcount:int=3) -> None:
  294. self.maxcount = maxcount
  295. self.branch = branch
  296. def count(self,value:PlatformRelease)->int:
  297. content=self._dict.get(value.platform())
  298. if content == None:
  299. return 0
  300. return len(content)
  301. def get_platform(self,platform:str)->list:
  302. return self._dict[platform]
  303. def get_platform_keys(self):
  304. return self._dict.keys()
  305. def get_all(self)->list:
  306. result:list=[]
  307. for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
  308. for release in platform:
  309. result.append(release)
  310. return result
  311. def append(self,value:PlatformRelease):
  312. # optional processing here
  313. if self.count(value) == 0:
  314. self._dict[value.platform()] = []
  315. if self.should_add(value):
  316. logger.info(f'Adding release {value.name} to the list')
  317. self._dict[value.platform()].append(value)
  318. else:
  319. logger.info(f'Skipping release {value.name}')
  320. def get_attributes(self):
  321. res = []
  322. release:PlatformRelease
  323. for release in self.get_all():
  324. res.append(release.get_attributes())
  325. return res
  326. def get_minlen(self)->int:
  327. return min([len(self.get_platform(p)) for p in self.get_platform_keys()])
  328. def got_all_packages(self)->bool:
  329. return self.get_minlen() >=self.maxcount
  330. def should_add(self,release:PlatformRelease)->bool:
  331. return self.count(release) <=self.maxcount
  332. def add_package(self,package:PlatformRelease, with_artifacts:bool=True):
  333. if self.branch != package.branch:
  334. logger.info(f'Skipping release {package.name} from branch {package.branch}')
  335. elif package.has_artifacts or not with_artifacts:
  336. self.append(package)
  337. @classmethod
  338. def get_last_commit(cls)->Commit:
  339. if cls.repo is None:
  340. cls.get_repository(os.getcwd())
  341. target=cls.repo.head.target
  342. last_commit=''
  343. try:
  344. last_commit=cls.repo[last_commit]
  345. logger.info(f'Last commit for target {target} is {last_commit}')
  346. except Exception as e:
  347. logger.error(f'Unable to retrieve last commit for target {target}: {e}')
  348. last_commit=None
  349. return last_commit
  350. @classmethod
  351. def get_repository(cls,path:str=os.getcwd())->Repository:
  352. if cls.repo is None:
  353. try:
  354. logger.info(f'Opening repository from {path}')
  355. cls.repo=Repository(path=path)
  356. except GitError as ex:
  357. print(f'::error::Error while trying to access the repository.')
  358. print(f'::error::Content of {path}:')
  359. print('\n::error::'.join(get_file_list(path)))
  360. raise
  361. return cls.repo
  362. @classmethod
  363. def resolve_commit(cls,repo:Repository,commit_id:str)->Commit:
  364. commit:Commit
  365. reference:Reference
  366. commit, reference = repo.resolve_refish(commit_id)
  367. return commit
  368. @classmethod
  369. def get_release_branch(cls,repo:Repository,platform_release)->str:
  370. match = [t for t in repo.branches.with_commit(platform_release.target_commitish)]
  371. no_origin = [t for t in match if 'origin' not in t]
  372. if len(no_origin) == 0 and len(match) > 0:
  373. return match[0].split('/')[1]
  374. elif len(no_origin) >0:
  375. return no_origin[0]
  376. return ''
  377. @classmethod
  378. def get_flash_parms(cls,file_path):
  379. flash = parse_json(file_path)
  380. od:collections.OrderedDict = collections.OrderedDict()
  381. for z in flash['flash_files'].items():
  382. base_name:str = os.path.basename(z[1])
  383. od[base_name.rstrip().lstrip()] = literal_eval( z[0])
  384. return collections.OrderedDict(sorted(od.items()))
  385. @classmethod
  386. def get_releases(cls,flash_file_path,maxcount:int,name_prefix):
  387. repo=Releases.get_repository(os.getcwd())
  388. flash_parms = Releases.get_flash_parms(flash_file_path)
  389. packages:Releases = cls(branch=repo.head.shorthand,maxcount=maxcount)
  390. build_dir=os.path.dirname(flash_file_path)
  391. for page in range(1,999):
  392. logger.debug(f'Getting releases page {page}')
  393. releases = get_github_data(repo,f'releases?per_page=50&page={page}')
  394. if len(releases)==0:
  395. logger.debug(f'No more release found for page {page}')
  396. break
  397. for release_entry in [AttributeDict(platform) for platform in releases]:
  398. packages.add_package(PlatformRelease(release_entry,flash_parms,build_dir,Releases.get_release_branch(repo,release_entry),name_prefix))
  399. if packages.got_all_packages():
  400. break
  401. if packages.got_all_packages():
  402. break
  403. return packages
  404. def update(self, *args, **kwargs):
  405. if args:
  406. if len(args) > 1:
  407. raise TypeError("update expected at most 1 arguments, "
  408. "got %d" % len(args))
  409. other = dict(args[0])
  410. for key in other:
  411. self[key] = other[key]
  412. for key in kwargs:
  413. self[key] = kwargs[key]
  414. def setdefault(self, key, value=None):
  415. if key not in self:
  416. self[key] = value
  417. return self[key]
  418. def set_workdir(args):
  419. logger.info(f'setting work dir to: {args.cwd}')
  420. os.chdir(os.path.abspath(args.cwd))
  421. def parse_json(filename:str):
  422. fname = os.path.abspath(filename)
  423. folder:str = os.path.abspath(os.path.dirname(filename))
  424. logger.info(f'Opening json file {fname} from {folder}')
  425. try:
  426. with open(fname) as f:
  427. content=f.read()
  428. logger.debug(f'Loading json\n{content}')
  429. return json.loads(content)
  430. except JSONDecodeError as ex:
  431. print(f'::error::Error parsing {content}')
  432. except Exception as ex:
  433. print(f'::error::Unable to parse flasher args json file. Content of {folder}:')
  434. print('\n::error::'.join(get_file_list(folder)))
  435. raise
  436. def write_github_env(args):
  437. logger.info(f'Writing environment details to {args.env_file}...')
  438. with open(args.env_file, "w") as env_file:
  439. for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
  440. line=f'{attr}={getattr(github_env,attr)}'
  441. logger.info(line)
  442. env_file.write(f'{line}\n')
  443. os.environ[attr] = str(getattr(github_env,attr))
  444. logger.info(f'Done writing environment details to {args.env_file}!')
  445. def set_workflow_output(args):
  446. logger.info(f'Outputting job variables ...')
  447. for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
  448. # use print instead of logger, as we need the raw output without the date/time prefix from logging
  449. print(f'::set-output name={attr}::{getattr(github_env,attr)}')
  450. os.environ[attr] = str(getattr(github_env,attr))
  451. logger.info(f'Done outputting job variables!')
  452. def format_commit(commit):
  453. #463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
  454. dt = datetime.fromtimestamp(float(commit.author.time), timezone( timedelta(minutes=commit.author.offset) ))
  455. timestr = dt.strftime('%c%z')
  456. cmesg= commit.message.replace('\n', ' ' )
  457. return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace(' ', ' ', )
  458. def format_artifact_name(base_name:str='',args = AttributeDict(os.environ)):
  459. return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'
  460. def handle_build_flags(args):
  461. set_workdir(args)
  462. logger.info('Setting global build flags')
  463. last:Commit = Releases.get_last_commit()
  464. commit_message:str= last.message.replace('\n', ' ')
  465. github_env.mock=1 if args.mock else 0
  466. github_env.release_flag=1 if args.mock or args.force or 'release' in commit_message.lower() else 0
  467. github_env.ui_build=1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag==1 else 0
  468. set_workflow_output(github_env)
def handle_environment(args):
    """Populate github_env with build metadata (author, version, tag, docker
    image, revision log) and write it to the GitHub env file."""
    set_workdir(args)
    logger.info('Setting environment variables...')
    last:Commit = Releases.get_last_commit()
    commit_message:str= last.message.replace('\n', ' ')
    github_env.author_name=last.author.name
    github_env.author_email=last.author.email
    github_env.committer_name=last.committer.name
    github_env.committer_email=last.committer.email
    github_env.node=args.node
    github_env.depth=args.depth
    github_env.major=args.major
    github_env.build=args.build
    # duplicated in upper case for consumers expecting these exact names
    github_env.DEPTH=args.depth
    github_env.TARGET_BUILD_NAME=args.node
    github_env.build_version_prefix=args.major
    # strip characters that are unsafe in tags/file names from the branch name
    github_env.branch_name=re.sub('[^a-zA-Z0-9\-~!@_\.]', '', Releases.get_repository().head.shorthand)
    github_env.BUILD_NUMBER=str(args.build)
    github_env.tag=f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit=commit_message
    github_env.DOCKER_IMAGE_NAME=args.docker
    # release name format: version-depth#idf#platform#branch
    github_env.name=f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix=format_artifact_name('squeezelite-esp32-',github_env)
    github_env.artifact_file_name=f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name=f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER=f'{args.node}-{ args.build }'
    # revision log: the 10 most recent commits from HEAD, newest first
    commit_list = []
    for c in [c for i,c in enumerate(Releases.get_repository().walk(last.id,pygit2.GIT_SORT_TIME)) if i<10]:
        commit_list.append(format_commit(c))
    github_env.description='### Revision Log<br><<~EOD\n'+'<br>\n'.join(commit_list)+'\n~EOD'
    write_github_env(args)
  500. def handle_artifacts(args):
  501. set_workdir(args)
  502. logger.info(f'Handling artifacts')
  503. for attr in artifacts_formats:
  504. target:str=attr[1].replace(artifacts_formats_outdir,args.outdir).replace(artifacts_formats_prefix,format_artifact_name())
  505. logger.debug(f'file {attr[0]} will be copied to {target}')
  506. try:
  507. os.makedirs(os.path.dirname(target), exist_ok=True)
  508. shutil.copyfile(attr[0].rstrip(), target, follow_symlinks=True)
  509. except Exception as ex:
  510. print(f'::error::Error while copying to {target}' )
  511. print(f'::error::Content of {os.path.dirname(attr[0].rstrip())}:')
  512. print('\n::error::'.join(get_file_list(os.path.dirname(attr[0].rstrip()))))
  513. raise
def delete_folder(path):
    '''Recursively delete a folder tree, clearing read-only flags first'''
    # NOTE(review): os.walk already recurses, but each subfolder is also
    # removed through the recursive call below; by the time the outer walk
    # would descend into a subfolder it is already gone and walk skips it.
    for root, dirs, files in os.walk(path,topdown=True):
        for dir in dirs:
            fulldirpath=os.path.join(root, dir)
            logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            logger.debug(f'Setting file read/write {full_path}')
            # clear the read-only attribute so removal succeeds on Windows
            os.chmod(full_path ,stat.S_IWRITE)
            logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        logger.debug(f'Changing folder read/write {path}')
        os.chmod(path ,stat.S_IWRITE)
        logger.warning(f'Deleting Folder {path}')
        os.rmdir(path)
  532. def get_file_stats(path):
  533. fstat:os.stat_result = pathlib.Path(path).stat()
  534. # Convert file size to MB, KB or Bytes
  535. mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
  536. if (fstat.st_size > 1024 * 1024):
  537. return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
  538. elif (fstat.st_size > 1024):
  539. return math.ceil(fstat.st_size / 1024), "KB", mtime
  540. return fstat.st_size, "B", mtime
  541. def get_file_list(root_path, max_levels:int=2 )->list:
  542. outlist:list=[]
  543. for root, dirs, files in os.walk(root_path):
  544. path = root.split(os.sep)
  545. if len(path) <= max_levels:
  546. outlist.append(f'\n{root}')
  547. for file in files:
  548. full_name=os.path.join(root, file)
  549. fsize,unit,mtime = get_file_stats(full_name)
  550. outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(len(path) * "---",fsize,unit,mtime,file))
  551. return outlist
  552. def get_recursive_list(path)->list:
  553. outlist:list=[]
  554. for root, dirs, files in os.walk(path,topdown=True):
  555. for fname in files:
  556. outlist.append((fname,os.path.join(root,fname)))
  557. return outlist
  558. def handle_manifest(args):
  559. set_workdir(args)
  560. logger.info(f'Creating the web installer manifest')
  561. env = AttributeDict(os.environ)
  562. if not os.path.exists(os.path.dirname(args.outdir)):
  563. logger.info(f'Creating target folder {args.outdir}')
  564. os.makedirs(args.outdir, exist_ok=True)
  565. releases:Releases = Releases.get_releases(args.flash_file, args.max_count,args.manif_name)
  566. release:PlatformRelease
  567. for release in releases.get_all():
  568. release.get_zip_file()
  569. man = copy.deepcopy(manifest)
  570. man['manifest_name'] = release.get_manifest_name()
  571. man['builds'][0]['parts'] = release.process_files(args.outdir)
  572. man['name'] = release.platform()
  573. man['version'] = release.release_details.version
  574. logger.debug(f'Generated manifest: \n{json.dumps(man,indent=4)}')
  575. fullpath=os.path.join(args.outdir,release.get_manifest_name())
  576. logger.info(f'Writing manifest to {fullpath}')
  577. with open(fullpath, "w") as f:
  578. json.dump(man,f,indent=4)
  579. release.cleanup()
  580. mainmanifest=os.path.join(args.outdir,args.manif_name)
  581. logger.info(f'Writing main manifest {mainmanifest}')
  582. with open(mainmanifest,'w') as f:
  583. json.dump(releases.get_attributes(),f,indent=4)
  584. def get_new_file_names(manifest:str,source:str)->collections.OrderedDict():
  585. artifacts = parse_json(os.path.join(source,manifest))
  586. new_release_files:dict = collections.OrderedDict()
  587. for artifact in artifacts:
  588. for name in [f["name"] for f in artifact["bin_files"]]:
  589. new_release_files[name] = artifact
  590. new_release_files[artifact['manifest_name']] = artifact['name']
  591. return new_release_files
  592. def copy_no_overwrite(source:str,target:str) :
  593. sfiles = os.listdir(source)
  594. for f in sfiles:
  595. source_file = os.path.join(source,f)
  596. target_file = os.path.join(target,f)
  597. if not os.path.exists(target_file):
  598. logger.info(f'Copying {f} to target')
  599. shutil.copy(source_file, target_file)
  600. else:
  601. logger.debug(f'Skipping existing file {f}')
  602. def get_changed_items(repo:Repository)->Dict:
  603. changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
  604. original_status_dict: Dict[str, int] = repo.status()
  605. # transfer any non-filemode changes to a new dictionary
  606. status_dict: Dict[str, int] = {}
  607. for filename, code in original_status_dict.items():
  608. if code != changed_filemode_status_code:
  609. status_dict[filename] = code
  610. return status_dict
  611. def is_dirty(repo:Repository)->bool:
  612. return len(get_changed_items(repo)) > 0
def push_if_change(repo:Repository, token:str):
    """Commit and push all pending changes in repo using `token` for auth.

    Author/committer identities are taken from the environment (set earlier
    by the 'environment' command). No-op when the tree is clean.
    """
    if is_dirty(repo):
        logger.info(f'Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index:Index = repo.index
        index.add_all()
        index.write()
        reference=repo.head.name
        author = Signature(env.author_name,env.author_email)
        committer = Signature(env.committer_name, env.committer_email)
        message = f'Web installer for {format_artifact_name()}'
        tree = index.write_tree()
        commit = repo.create_commit(reference, author, committer, message, tree,[repo.head.target])
        origin:Remote=repo.remotes['origin']
        logger.info(f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        credentials = UserPass(token, 'x-oauth-basic') # passing credentials
        # NOTE(review): 'origin' is looked up a second time here; `remote`
        # and `origin` refer to the same remote — consolidation candidate.
        remote:Remote = repo.remotes['origin']
        remote.credentials = credentials
        remote.push([reference],callbacks= RemoteCallbacks(UserPass(token, 'x-oauth-basic')))
    else:
        logger.warning(f'No change found. Skipping update')
  634. def update_files(target_artifacts:str,manif_name:str,source:str):
  635. new_list:dict = get_new_file_names(manif_name, os.path.abspath(source))
  636. if os.path.exists(target_artifacts):
  637. logger.info(f'Removing obsolete files from {target_artifacts}')
  638. for entry in get_recursive_list(target_artifacts):
  639. f=entry[0]
  640. full_target=entry[1]
  641. if f not in new_list.keys():
  642. logger.warning(f'Removing obsolete file {f}')
  643. os.remove(full_target)
  644. else:
  645. logger.info(f'Creating target folder {target_artifacts}')
  646. os.makedirs(target_artifacts, exist_ok=True)
  647. logger.info(f'Copying installer files to {target_artifacts}:')
  648. copy_no_overwrite(os.path.abspath(source), target_artifacts)
  649. def handle_pushinstaller(args):
  650. set_workdir(args)
  651. logger.info('Pushing web installer updates... ')
  652. target_artifacts = os.path.join(args.target,args.artifacts)
  653. if os.path.exists(args.target):
  654. logger.info(f'Removing files (if any) from {args.target}')
  655. delete_folder(args.target)
  656. logger.info(f'Cloning from {args.url} into {args.target}')
  657. repo = pygit2.clone_repository(args.url,args.target)
  658. repo.checkout_head()
  659. update_files(target_artifacts,args.manif_name,args.source)
  660. push_if_change(repo,args.token)
  661. repo.state_cleanup()
  662. def handle_show(args):
  663. logger.info('Show')
  664. def extract_files_from_archive(url):
  665. tempfolder= tempfile.mkdtemp()
  666. platform = requests.get(url)
  667. z = zipfile.ZipFile(io.BytesIO(platform.content))
  668. z.extractall(tempfolder)
  669. return tempfolder
  670. def handle_list_files(args):
  671. print(f'Content of {args.cwd}:')
  672. print('\n'.join(get_file_list(args.cwd)))
# Wire each sub-parser to its handler function
parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_artifacts.set_defaults(func=handle_artifacts, cmd='artifacts')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')
  680. def main():
  681. args = parser.parse_args()
  682. logger.setLevel(logging.getLevelName(args.loglevel))
  683. logger.info(f'build_tools version : {tool_version}')
  684. logger.debug(f'Processing command {args.command}')
  685. func:Callable = getattr(args, 'func', None)
  686. if func is not None:
  687. # Call whatever subcommand function was selected
  688. func(args)
  689. else:
  690. # No subcommand was provided, so call help
  691. parser.print_usage()
  692. if __name__ == '__main__':
  693. main()