#!/usr/bin/env python
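"""Helper for the squeezelite-esp32 build process.

Provides the subcommands wired up below: environment, build_flags, artifacts,
manifest, pushinstaller, show and list_files.
"""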

from json import JSONDecodeError
import math
import pathlib
import time
from typing import Callable, Dict, Union
import pkg_resources
import sys
import os
import io
from os import stat_result, walk
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import logging
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    print(f'::error::Failed importing module {ex.name}, using interpreter {sys.executable}. \n Installed packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(["%s==%s" % (i.key, i.version) for i in installed_packages])
    print('\n'.join(installed_packages_list))
    print('Environment: ')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise

FORMAT = '%(asctime)s %(message)s'
logging.basicConfig(format=FORMAT)
logger: logging.Logger = logging.getLogger(__name__)
github_env = type('', (), {})()
tool_version = "1.0.5"
manifest = {
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": [
            ]
        }
    ]
}
artifacts_formats_outdir = '$OUTDIR'
artifacts_formats_prefix = '$PREFIX'
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin ', '$OUTDIR/$PREFIX-partition-table.bin'],
]
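
# dict subclass that exposes keys as attributes, falling back to upper-case, lower-case
# and quote-stripped variants so GitHub API payloads and os.environ can be accessed uniformly.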
class AttributeDict(dict):
    __slots__ = ()

    def __getattr__(self, name: str):
        try:
            return self[name.upper()]
        except Exception:
            try:
                return self[name.lower()]
            except Exception:
                for attr in self.keys():
                    if name.lower() == attr.replace("'", "").lower():
                        return self[attr]
    __setattr__ = dict.__setitem__
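
# Command line definition: a global parser plus one subparser per build step.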
parser = argparse.ArgumentParser(description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str, help='Working directory', default=os.getcwd())
parser.add_argument('--loglevel', type=str, choices={'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'}, help='Logging level', default='INFO')
subparsers = parser.add_subparsers(dest='command', required=True)
parser_dir = subparsers.add_parser("list_files",
                                   add_help=False,
                                   description="List Files parser",
                                   help="Display the content of the folder")
parser_manifest = subparsers.add_parser("manifest",
                                        add_help=False,
                                        description="Manifest parser",
                                        help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str, help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument('--max_count', type=int, help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument('--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_manifest.add_argument('--outdir', required=True, type=str, help='Output directory for files and manifests')
parser_artifacts = subparsers.add_parser("artifacts",
                                         add_help=False,
                                         description="Artifacts parser",
                                         help="Handles the creation of artifacts files")
parser_artifacts.add_argument('--outdir', type=str, help='Output directory for artifact files', default='./artifacts/')
parser_pushinstaller = subparsers.add_parser("pushinstaller",
                                             add_help=False,
                                             description="Web Installer Checkout parser",
                                             help="Handles pushing artifacts to the web installer repository")
parser_pushinstaller.add_argument('--target', type=str, help='Output directory for web installer repository', default='./web_installer/')
parser_pushinstaller.add_argument('--artifacts', type=str, help='Target subdirectory for web installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--source', type=str, help='Source directory for the installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--url', type=str, help='Web Installer clone url', default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument('--web_installer_branch', type=str, help='Web Installer branch to use', default='main')
parser_pushinstaller.add_argument('--token', type=str, help='Auth token for pushing changes')
parser_pushinstaller.add_argument('--flash_file', type=str, help='Manifest json file path')
parser_pushinstaller.add_argument('--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_environment = subparsers.add_parser("environment",
                                           add_help=False,
                                           description="Environment parser",
                                           help="Updates the build environment")
parser_environment.add_argument('--env_file', type=str, help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument('--build', required=True, type=int, help='The build number')
parser_environment.add_argument('--node', required=True, type=str, help='The matrix node being built')
parser_environment.add_argument('--depth', required=True, type=int, help='The bit depth being built')
parser_environment.add_argument('--major', type=str, help='Major version', default='2')
parser_environment.add_argument('--docker', type=str, help='Docker image to use', default='sle118/squeezelite-esp32-idfv43')
parser_show = subparsers.add_parser("show",
                                    add_help=False,
                                    description="Show parser",
                                    help="Show the build environment")
parser_build_flags = subparsers.add_parser("build_flags",
                                           add_help=False,
                                           description="Build Flags",
                                           help="Updates the build environment with build flags")
parser_build_flags.add_argument('--mock', action='store_true', help='Mock release')
parser_build_flags.add_argument('--force', action='store_true', help='Force a release build')
parser_build_flags.add_argument('--ui_build', action='store_true', help='Include building the web UI')
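
# Call the GitHub REST API for the repository behind the 'origin' remote
# (e.g. 'releases?per_page=50') and return the decoded JSON payload.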
def get_github_data(repo: Repository, api):
    base_url = urlparse(repo.remotes['origin'].url)
    url = f"https://api.github.com/repos{base_url.path.split('.')[-2]}/{api}"
    resp = requests.get(url, headers={"Content-Type": "application/vnd.github.v3+json"})
    return json.loads(resp.text)

def dump_directory(dir_path):
    # list to store files name
    res = []
    for (dir_path, dir_names, file_names) in walk(dir_path):
        res.extend(file_names)
    print(res)
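
# Release names are encoded as 'version#idf#platform#branch'; the version part may carry
# an optional '-' suffix (the bit depth, stored here as bitrate).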
class ReleaseDetails():
    version: str
    idf: str
    platform: str
    branch: str
    bitrate: str

    def __init__(self, tag: str) -> None:
        self.version, self.idf, self.platform, self.branch = tag.split('#')
        # bitrate is optional; default to None so get_full_platform() works for tags without a '-' suffix
        self.bitrate = None
        try:
            self.version, self.bitrate = self.version.split('-')
        except Exception:
            pass

    def get_attributes(self):
        return {
            'version': self.version,
            'idf': self.idf,
            'platform': self.platform,
            'branch': self.branch,
            'bitrate': self.bitrate
        }

    def format_prefix(self) -> str:
        return f'{self.branch}-{self.platform}-{self.version}'

    def get_full_platform(self):
        return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"
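
# One firmware binary from the build output, together with its flash offset and the
# renamed target file that ends up in the web installer manifest.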
class BinFile():
    name: str
    offset: int
    source_full_path: str
    target_name: str
    target_fullpath: str
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{self.name}'.rstrip()

    def get_manifest(self):
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        logger.debug(f'file {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path, self.target_fullpath, follow_symlinks=True)
        except Exception as ex:
            print(f'::error::Error while copying {self.source_full_path} to {self.target_fullpath}')
            print(f'::error::Content of {os.path.dirname(self.source_full_path.rstrip())}:')
            print('\n::error::'.join(get_file_list(os.path.dirname(self.source_full_path.rstrip()))))
            raise
        return self.target_fullpath

    def get_attributes(self):
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }
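
# A single GitHub release for one platform/bit-depth combination: wraps the release
# metadata, its downloadable assets and the extracted binary files.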
class PlatformRelease():
    name: str
    description: str
    url: str = ''
    zipfile: str = ''
    tempfolder: str
    release_details: ReleaseDetails
    flash_parms = {}
    build_dir: str
    has_artifacts: bool
    branch: str
    assets: list
    bin_files: list
    name_prefix: str

    def get_manifest_name(self) -> str:
        return f'{self.name_prefix}-{self.release_details.format_prefix()}.json'

    def __init__(self, git_release, flash_parms, build_dir, branch, name_prefix) -> None:
        self.name = git_release.tag_name
        self.description = git_release.body
        self.assets = git_release['assets']
        self.has_artifacts = False
        self.name_prefix = name_prefix
        if len(self.assets) > 0:
            if self.has_asset_type():
                self.url = self.get_asset_from_extension().browser_download_url
            if self.has_asset_type('.zip'):
                self.zipfile = self.get_asset_from_extension(ext='.zip').browser_download_url
                self.has_artifacts = True
        self.release_details = ReleaseDetails(git_release.name)
        self.bin_files = list()
        self.flash_parms = flash_parms
        self.build_dir = build_dir
        self.branch = branch

    def process_files(self, outdir: str) -> list:
        parts = []
        for f in self.bin_files:
            f.copy(outdir)
            parts.append(f.get_manifest())
        # return the manifest entries so handle_manifest() can embed them in the json file
        return parts

    def get_asset_from_extension(self, ext='.bin'):
        for a in self.assets:
            filename = AttributeDict(a).name
            file_name, file_extension = os.path.splitext(filename)
            if file_extension == ext:
                return AttributeDict(a)
        return None

    def has_asset_type(self, ext='.bin') -> bool:
        return self.get_asset_from_extension(ext) is not None

    def platform(self):
        return self.release_details.get_full_platform()

    def get_zip_file(self):
        self.tempfolder = extract_files_from_archive(self.zipfile)
        logger.info(f'Artifacts for {self.name} extracted to {self.tempfolder}')
        try:
            for artifact in artifacts_formats:
                base_name = os.path.basename(artifact[0]).strip()
                self.bin_files.append(BinFile(self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
            self.has_artifacts = True
        except Exception:
            self.has_artifacts = False

    def cleanup(self):
        logger.info(f'removing {self.name} temp directory {self.tempfolder}')
        shutil.rmtree(self.tempfolder)

    def get_attributes(self):
        return {
            'name': self.name,
            'branch': self.branch,
            'description': self.description,
            'url': self.url,
            'zipfile': self.zipfile,
            'release_details': self.release_details.get_attributes(),
            'bin_files': [b.get_attributes() for b in self.bin_files],
            'manifest_name': self.get_manifest_name()
        }
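
# Collection of PlatformRelease objects, grouped by platform and capped at maxcount
# entries per platform; also hosts the pygit2 repository helpers used by the subcommands.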
class Releases():
    _dict: dict = collections.OrderedDict()
    maxcount: int = 0
    branch: str = ''
    repo: Repository = None
    manifest_name: str

    def __init__(self, branch: str, maxcount: int = 3) -> None:
        self.maxcount = maxcount
        self.branch = branch

    def count(self, value: PlatformRelease) -> int:
        content = self._dict.get(value.platform())
        if content is None:
            return 0
        return len(content)

    def get_platform(self, platform: str) -> list:
        return self._dict[platform]

    def get_platform_keys(self):
        return self._dict.keys()

    def get_all(self) -> list:
        result: list = []
        for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
            for release in platform:
                result.append(release)
        return result

    def append(self, value: PlatformRelease):
        # optional processing here
        if self.count(value) == 0:
            self._dict[value.platform()] = []
        if self.should_add(value):
            logger.info(f'Adding release {value.name} to the list')
            self._dict[value.platform()].append(value)
        else:
            logger.info(f'Skipping release {value.name}')

    def get_attributes(self):
        res = []
        release: PlatformRelease
        for release in self.get_all():
            res.append(release.get_attributes())
        return res

    def get_minlen(self) -> int:
        return min([len(self.get_platform(p)) for p in self.get_platform_keys()])

    def got_all_packages(self) -> bool:
        return self.get_minlen() >= self.maxcount

    def should_add(self, release: PlatformRelease) -> bool:
        return self.count(release) <= self.maxcount

    def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
        if self.branch != package.branch:
            logger.info(f'Skipping release {package.name} from branch {package.branch}')
        elif package.has_artifacts or not with_artifacts:
            self.append(package)

    @classmethod
    def get_last_commit(cls) -> Commit:
        if cls.repo is None:
            cls.get_repository(os.getcwd())
        return cls.repo[cls.repo.head.target]

    @classmethod
    def get_repository(cls, path: str = os.getcwd()) -> Repository:
        if cls.repo is None:
            try:
                logger.info(f'Opening repository from {path}')
                cls.repo = Repository(path=path)
            except GitError as ex:
                print('::error::Error while trying to access the repository.')
                print(f'::error::Content of {path}:')
                print('\n::error::'.join(get_file_list(path)))
                raise
        return cls.repo

    @classmethod
    def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
        commit: Commit
        reference: Reference
        commit, reference = repo.resolve_refish(commit_id)
        return commit

    @classmethod
    def get_release_branch(cls, repo: Repository, platform_release) -> str:
        match = [t for t in repo.branches.with_commit(platform_release.target_commitish)]
        no_origin = [t for t in match if 'origin' not in t]
        if len(no_origin) == 0 and len(match) > 0:
            return match[0].split('/')[1]
        elif len(no_origin) > 0:
            return no_origin[0]
        return ''

    @classmethod
    def get_flash_parms(cls, file_path):
        flash = parse_json(file_path)
        od: collections.OrderedDict = collections.OrderedDict()
        for z in flash['flash_files'].items():
            base_name: str = os.path.basename(z[1])
            od[base_name.strip()] = literal_eval(z[0])
        return collections.OrderedDict(sorted(od.items()))

    @classmethod
    def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
        repo = Releases.get_repository(os.getcwd())
        flash_parms = Releases.get_flash_parms(flash_file_path)
        packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
        build_dir = os.path.dirname(flash_file_path)
        for page in range(1, 999):
            logger.debug(f'Getting releases page {page}')
            releases = get_github_data(repo, f'releases?per_page=50&page={page}')
            if len(releases) == 0:
                logger.debug(f'No more release found for page {page}')
                break
            for release_entry in [AttributeDict(platform) for platform in releases]:
                packages.add_package(PlatformRelease(release_entry, flash_parms, build_dir, Releases.get_release_branch(repo, release_entry), name_prefix))
                if packages.got_all_packages():
                    break
            if packages.got_all_packages():
                break
        return packages

    def update(self, *args, **kwargs):
        if args:
            if len(args) > 1:
                raise TypeError("update expected at most 1 arguments, "
                                "got %d" % len(args))
            other = dict(args[0])
            for key in other:
                self[key] = other[key]
        for key in kwargs:
            self[key] = kwargs[key]

    def setdefault(self, key, value=None):
        if key not in self:
            self[key] = value
        return self[key]

def set_workdir(args):
    logger.info(f'setting work dir to: {args.cwd}')
    os.chdir(os.path.abspath(args.cwd))


def parse_json(filename: str):
    fname = os.path.abspath(filename)
    folder: str = os.path.abspath(os.path.dirname(filename))
    logger.info(f'Opening json file {fname} from {folder}')
    try:
        with open(fname) as f:
            content = f.read()
            logger.debug(f'Loading json\n{content}')
            return json.loads(content)
    except JSONDecodeError as ex:
        print(f'::error::Error parsing {content}')
    except Exception as ex:
        print(f'::error::Unable to parse flasher args json file. Content of {folder}:')
        print('\n::error::'.join(get_file_list(folder)))
        raise

def write_github_env(args):
    logger.info(f'Writing environment details to {args.env_file}...')
    with open(args.env_file, "w") as env_file:
        for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
            line = f'{attr}={getattr(github_env,attr)}'
            logger.info(line)
            env_file.write(f'{line}\n')
            os.environ[attr] = str(getattr(github_env, attr))
    logger.info(f'Done writing environment details to {args.env_file}!')


def set_workflow_output(args):
    logger.info('Outputting job variables ...')
    for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
        # use print instead of logger, as we need the raw output without the date/time prefix from logging
        print(f'::set-output name={attr}::{getattr(github_env,attr)}')
        os.environ[attr] = str(getattr(github_env, attr))
    logger.info('Done outputting job variables!')

def format_commit(commit):
    # 463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(timedelta(minutes=commit.author.offset)))
    timestr = dt.strftime('%c%z')
    cmesg = commit.message.replace('\n', ' ')
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace('  ', ' ')


def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
    return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'
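
# Build flags exported to the workflow: 'mock' skips the actual release, 'release_flag' is set
# when the last commit message mentions 'release' (or when forced), and 'ui_build' additionally
# triggers the web UI build.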
def handle_build_flags(args):
    set_workdir(args)
    logger.info('Setting global build flags')
    last: Commit = Releases.get_last_commit()
    commit_message: str = last.message.replace('\n', ' ')
    github_env.mock = 1 if args.mock else 0
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag == 1 else 0
    set_workflow_output(github_env)


def handle_environment(args):
    set_workdir(args)
    logger.info('Setting environment variables...')
    last: Commit = Releases.get_last_commit()
    commit_message: str = last.message.replace('\n', ' ')
    github_env.author_name = last.author.name
    github_env.author_email = last.author.email
    github_env.committer_name = last.committer.name
    github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = re.sub(r'[^a-zA-Z0-9\-~!@_\.]', '', Releases.get_repository().head.shorthand)
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name('squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{args.build}'
    github_env.description = '### Revision Log<br><<~EOD\n' + '<br>\n'.join(format_commit(c) for i, c in enumerate(Releases.get_repository().walk(last.id, pygit2.GIT_SORT_TIME)) if i < 10) + '\n~EOD'
    write_github_env(args)

def handle_artifacts(args):
    set_workdir(args)
    logger.info('Handling artifacts')
    for attr in artifacts_formats:
        target: str = attr[1].replace(artifacts_formats_outdir, args.outdir).replace(artifacts_formats_prefix, format_artifact_name())
        logger.debug(f'file {attr[0]} will be copied to {target}')
        try:
            os.makedirs(os.path.dirname(target), exist_ok=True)
            shutil.copyfile(attr[0].rstrip(), target, follow_symlinks=True)
        except Exception as ex:
            print(f'::error::Error while copying to {target}')
            print(f'::error::Content of {os.path.dirname(attr[0].rstrip())}:')
            print('\n::error::'.join(get_file_list(os.path.dirname(attr[0].rstrip()))))
            raise

def delete_folder(path):
    '''Delete a folder tree, clearing read-only flags as needed'''
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            fulldirpath = os.path.join(root, dir)
            logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            logger.debug(f'Setting file read/write {full_path}')
            os.chmod(full_path, stat.S_IWRITE)
            logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        logger.debug(f'Changing folder read/write {path}')
        os.chmod(path, stat.S_IWRITE)
        logger.warning(f'Deleting Folder {path}')
        os.rmdir(path)

def get_file_stats(path) -> tuple[int, str, str]:
    fstat: os.stat_result = pathlib.Path(path).stat()
    # Convert file size to MB, KB or Bytes
    mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
    if fstat.st_size > 1024 * 1024:
        return math.ceil(fstat.st_size / (1024 * 1024)), "MB", mtime
    elif fstat.st_size > 1024:
        return math.ceil(fstat.st_size / 1024), "KB", mtime
    return fstat.st_size, "B", mtime


def get_file_list(root_path, max_levels: int = 2) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(root_path):
        path = root.split(os.sep)
        if len(path) <= max_levels:
            outlist.append(f'\n{root}')
            for file in files:
                full_name = os.path.join(root, file)
                fsize, unit, mtime = get_file_stats(full_name)
                outlist.append('{:s} {:8d} {:2s} {:18s}\t{:s}'.format(len(path) * "---", fsize, unit, mtime, file))
    return outlist


def get_recursive_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            outlist.extend(get_recursive_list(os.path.join(root, dir)))
        for fname in files:
            outlist.append(fname)
    # if os.path.exists(path):
    #     outlist.append(path)
    outlist.sort()
    return outlist

def handle_manifest(args):
    set_workdir(args)
    logger.info('Creating the web installer manifest')
    env = AttributeDict(os.environ)
    if not os.path.exists(os.path.dirname(args.outdir)):
        logger.info(f'Creating target folder {args.outdir}')
        os.makedirs(args.outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = release.get_manifest_name()
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        logger.debug(f'Generated manifest: \n{json.dumps(man,indent=4)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        logger.info(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        release.cleanup()
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    logger.info(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)
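
# Helpers for syncing the generated artifacts into the web installer repository:
# build the set of expected file names from the main manifest, then add or remove files accordingly.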
def get_new_file_names(manifest: str, source: str) -> collections.OrderedDict:
    artifacts = parse_json(os.path.join(source, manifest))
    new_release_files: dict = collections.OrderedDict()
    for artifact in artifacts:
        for name in [f["name"] for f in artifact["bin_files"]]:
            new_release_files[name] = artifact
        new_release_files[artifact['manifest_name']] = artifact['name']
    return new_release_files

def copy_no_overwrite(source: str, target: str):
    sfiles = os.listdir(source)
    for f in sfiles:
        source_file = os.path.join(source, f)
        target_file = os.path.join(target, f)
        if not os.path.exists(target_file):
            logger.info(f'Copying {f} to target')
            shutil.copy(source_file, target_file)
        else:
            logger.debug(f'Skipping existing file {f}')


def get_changed_items(repo: Repository) -> Dict:
    changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
    original_status_dict: Dict[str, int] = repo.status()
    # transfer any non-filemode changes to a new dictionary
    status_dict: Dict[str, int] = {}
    for filename, code in original_status_dict.items():
        if code != changed_filemode_status_code:
            status_dict[filename] = code
    return status_dict


def is_dirty(repo: Repository) -> bool:
    return len(get_changed_items(repo)) > 0
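
# Commit and push the web installer changes (if any), authenticating over HTTPS
# with the provided token used as the username for GitHub's x-oauth-basic scheme.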
def push_if_change(repo: Repository, token: str):
    if is_dirty(repo):
        logger.info('Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        author = Signature(env.author_name, env.author_email)
        committer = Signature(env.committer_name, env.committer_email)
        message = f'Web installer for {format_artifact_name()}'
        tree = index.write_tree()
        commit = repo.create_commit(reference, author, committer, message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        logger.info(f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        credentials = UserPass(token, 'x-oauth-basic')  # passing credentials
        remote: Remote = repo.remotes['origin']
        remote.credentials = credentials
        remote.push([reference], callbacks=RemoteCallbacks(UserPass(token, 'x-oauth-basic')))
    else:
        logger.warning('No change found. Skipping update')

def update_files(target_artifacts: str, manif_name: str, source: str):
    new_list: dict = get_new_file_names(manif_name, os.path.abspath(source))
    if os.path.exists(target_artifacts):
        logger.info(f'Removing obsolete files from {target_artifacts}')
        for f in get_recursive_list(target_artifacts):
            if f not in new_list.keys():
                full_target = os.path.join(target_artifacts, f)
                logger.warning(f'Removing obsolete file {f}')
                os.remove(full_target)
    else:
        logger.info(f'Creating target folder {target_artifacts}')
        os.makedirs(target_artifacts, exist_ok=True)
    logger.info(f'Copying installer files to {target_artifacts}:')
    copy_no_overwrite(os.path.abspath(source), target_artifacts)


def handle_pushinstaller(args):
    set_workdir(args)
    logger.info('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        logger.info(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    logger.info(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    update_files(target_artifacts, args.manif_name, args.source)
    push_if_change(repo, args.token)
    repo.state_cleanup()

def handle_show(args):
    logger.info('Show')


def extract_files_from_archive(url):
    tempfolder = tempfile.mkdtemp()
    platform = requests.get(url)
    z = zipfile.ZipFile(io.BytesIO(platform.content))
    z.extractall(tempfolder)
    return tempfolder


def handle_list_files(args):
    print(f'Content of {args.cwd}:')
    print('\n'.join(get_file_list(args.cwd)))


parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_artifacts.set_defaults(func=handle_artifacts, cmd='artifacts')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')
parser_dir.set_defaults(func=handle_list_files, cmd='list_files')

def main():
    args = parser.parse_args()
    logger.setLevel(logging.getLevelName(args.loglevel))
    logger.info(f'build_tools version : {tool_version}')
    logger.debug(f'Processing command {args.command}')
    func: Callable = getattr(args, 'func', None)
    if func is not None:
        # Call whatever subcommand function was selected
        func(args)
    else:
        # No subcommand was provided, so call help
        parser.print_usage()


if __name__ == '__main__':
    main()
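
# Typical invocations (illustrative only; node/depth/paths below are examples, adjust to your CI setup):
#   build_tools.py environment --build 123 --node I2S-4MFlash --depth 16 --env_file "$GITHUB_ENV"
#   build_tools.py build_flags --ui_build
#   build_tools.py artifacts --outdir ./artifacts/
#   build_tools.py manifest --flash_file build/flasher_args.json --manif_name artifacts.json --outdir ./web/
#   build_tools.py pushinstaller --manif_name artifacts.json --token "$GITHUB_TOKEN"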