#!/usr/bin/env python
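"""Build helper for the squeezelite-esp32 CI pipeline.

Subcommands (see the argparse definitions below): 'environment' exports build
variables to the GitHub environment file, 'build_flags' derives release/UI
flags from the last commit message, 'artifacts' copies the firmware binaries
to the artifacts folder, 'manifest' builds the web installer manifests from
published GitHub releases, and 'pushinstaller' pushes those files to the web
installer repository. Illustrative invocation (values are examples only):
    build_tools.py environment --build 123 --node I2S-4MFlash --depth 16
"""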
from json import JSONDecodeError
import math
import pathlib
import time
from typing import Callable, Dict, Union
import pkg_resources
import sys
import os
import io
from os import stat_result, walk
try:
    import argparse
    import collections
    import copy
    import enum
    import glob
    import json
    import logging
    import re
    import shutil
    import stat
    import tempfile
    import zipfile
    from ast import literal_eval
    from collections import namedtuple
    from datetime import datetime, timedelta, timezone
    from json import JSONDecoder
    from operator import contains
    from platform import platform, release
    from pydoc import describe
    from time import strftime
    from typing import OrderedDict
    from urllib import response
    from urllib.parse import urlparse
    from urllib.request import Request
    from webbrowser import get
    import pygit2
    from pygit2 import Commit, Repository, GitError, Reference, UserPass, Index, Signature, RemoteCallbacks, Remote
    import requests
    from genericpath import isdir
except ImportError as ex:
    print(f'::error::Failed importing module {ex.name}, using interpreter {sys.executable}.\nInstalled packages:')
    installed_packages = pkg_resources.working_set
    installed_packages_list = sorted(["%s==%s" % (i.key, i.version) for i in installed_packages])
    print('\n'.join(installed_packages_list))
    print('Environment:')
    envlist = "\n".join([f"{k}={v}" for k, v in sorted(os.environ.items())])
    print(f'{envlist}')
    raise
FORMAT = '%(asctime)s %(message)s'
logging.basicConfig(format=FORMAT)
logger: logging.Logger = logging.getLogger(__name__)
github_env = type('', (), {})()
tool_version = "1.0.5"
manifest = {
    "name": "",
    "version": "",
    "home_assistant_domain": "slim_player",
    "funding_url": "https://esphome.io/guides/supporters.html",
    "builds": [
        {
            "chipFamily": "ESP32",
            "parts": [
            ]
        }
    ]
}
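
# Mapping of build outputs to their artifact names; $OUTDIR and $PREFIX are
# replaced at copy time (see handle_artifacts below).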
artifacts_formats_outdir = '$OUTDIR'
artifacts_formats_prefix = '$PREFIX'
artifacts_formats = [
    ['build/squeezelite.bin', '$OUTDIR/$PREFIX-squeezelite.bin'],
    ['build/recovery.bin', '$OUTDIR/$PREFIX-recovery.bin'],
    ['build/ota_data_initial.bin', '$OUTDIR/$PREFIX-ota_data_initial.bin'],
    ['build/bootloader/bootloader.bin', '$OUTDIR/$PREFIX-bootloader.bin'],
    ['build/partition_table/partition-table.bin', '$OUTDIR/$PREFIX-partition-table.bin'],
]

class AttributeDict(dict):
    __slots__ = ()

    def __getattr__(self, name: str):
        try:
            return self[name.upper()]
        except Exception:
            try:
                return self[name.lower()]
            except Exception:
                for attr in self.keys():
                    if name.lower() == attr.replace("'", "").lower():
                        return self[attr]
    __setattr__ = dict.__setitem__
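
# Command line interface: one sub-parser per build step, each wired to a
# handler function via set_defaults() at the bottom of this file.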
parser = argparse.ArgumentParser(description='Handles some parts of the squeezelite-esp32 build process')
parser.add_argument('--cwd', type=str, help='Working directory', default=os.getcwd())
parser.add_argument('--loglevel', type=str, choices={'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'}, help='Logging level', default='INFO')
subparsers = parser.add_subparsers(dest='command', required=True)
parser_manifest = subparsers.add_parser("manifest",
                                        add_help=False,
                                        description="Manifest parser",
                                        help="Handles the web installer manifest creation")
parser_manifest.add_argument('--flash_file', required=True, type=str, help='The file path which contains the firmware flashing definition')
parser_manifest.add_argument('--max_count', type=int, help='The maximum number of releases to keep', default=3)
parser_manifest.add_argument('--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_manifest.add_argument('--outdir', required=True, type=str, help='Output directory for files and manifests')
parser_artifacts = subparsers.add_parser("artifacts",
                                         add_help=False,
                                         description="Artifacts parser",
                                         help="Handles the creation of artifacts files")
parser_artifacts.add_argument('--outdir', type=str, help='Output directory for artifact files', default='./artifacts/')
parser_pushinstaller = subparsers.add_parser("pushinstaller",
                                             add_help=False,
                                             description="Web Installer Checkout parser",
                                             help="Handles pushing the artifacts to the web installer repository")
parser_pushinstaller.add_argument('--target', type=str, help='Output directory for the web installer repository', default='./web_installer/')
parser_pushinstaller.add_argument('--artifacts', type=str, help='Target subdirectory for web installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--source', type=str, help='Source directory for the installer artifacts', default='./web_installer/')
parser_pushinstaller.add_argument('--url', type=str, help='Web Installer clone url', default='https://github.com/sle118/squeezelite-esp32-installer.git')
parser_pushinstaller.add_argument('--web_installer_branch', type=str, help='Web Installer branch to use', default='main')
parser_pushinstaller.add_argument('--token', type=str, help='Auth token for pushing changes')
parser_pushinstaller.add_argument('--flash_file', type=str, help='Manifest json file path')
parser_pushinstaller.add_argument('--manif_name', required=True, type=str, help='Manifest files name and prefix')
parser_environment = subparsers.add_parser("environment",
                                           add_help=False,
                                           description="Environment parser",
                                           help="Updates the build environment")
parser_environment.add_argument('--env_file', type=str, help='Environment File', default=os.environ.get('GITHUB_ENV'))
parser_environment.add_argument('--build', required=True, type=int, help='The build number')
parser_environment.add_argument('--node', required=True, type=str, help='The matrix node being built')
parser_environment.add_argument('--depth', required=True, type=int, help='The bit depth being built')
parser_environment.add_argument('--major', type=str, help='Major version', default='2')
parser_environment.add_argument('--docker', type=str, help='Docker image to use', default='sle118/squeezelite-esp32-idfv43')
parser_show = subparsers.add_parser("show",
                                    add_help=False,
                                    description="Show parser",
                                    help="Show the build environment")
parser_build_flags = subparsers.add_parser("build_flags",
                                           add_help=False,
                                           description="Build Flags",
                                           help="Updates the build environment with build flags")
parser_build_flags.add_argument('--mock', action='store_true', help='Mock release')
parser_build_flags.add_argument('--force', action='store_true', help='Force a release build')
parser_build_flags.add_argument('--ui_build', action='store_true', help='Include building the web UI')
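
# GitHub REST API helper: derives the API URL from the 'origin' remote of the
# local repository and returns the parsed JSON response.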
def get_github_data(repo: Repository, api):
    base_url = urlparse(repo.remotes['origin'].url)
    url = f"https://api.github.com/repos{base_url.path.split('.')[-2]}/{api}"
    resp = requests.get(url, headers={"Content-Type": "application/vnd.github.v3+json"})
    return json.loads(resp.text)


def dump_directory(dir_path):
    # list to store file names
    res = []
    for (dir_path, dir_names, file_names) in walk(dir_path):
        res.extend(file_names)
    print(res)
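
# Release tags have the form '<version>#<idf version>#<platform>#<branch>',
# with an optional '-<bitrate>' suffix on the version part.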
class ReleaseDetails():
    version: str
    idf: str
    platform: str
    branch: str
    bitrate: str

    def __init__(self, tag: str) -> None:
        self.version, self.idf, self.platform, self.branch = tag.split('#')
        self.bitrate = None  # only set when the version part carries a '-<bitrate>' suffix
        try:
            self.version, self.bitrate = self.version.split('-')
        except Exception:
            pass

    def get_attributes(self):
        return {
            'version': self.version,
            'idf': self.idf,
            'platform': self.platform,
            'branch': self.branch,
            'bitrate': self.bitrate
        }

    def format_prefix(self) -> str:
        return f'{self.branch}-{self.platform}-{self.version}'

    def get_full_platform(self):
        return f"{self.platform}{f'-{self.bitrate}' if self.bitrate is not None else ''}"
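
# A single firmware binary taken from a release archive, together with the
# flash offset it must be written to and the name it gets in the installer.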
class BinFile():
    name: str
    offset: int
    source_full_path: str
    target_name: str
    target_fullpath: str
    artifact_relpath: str

    def __init__(self, source_path, file_build_path: str, offset: int, release_details: ReleaseDetails, build_dir) -> None:
        self.name = os.path.basename(file_build_path).rstrip()
        self.artifact_relpath = os.path.relpath(file_build_path, build_dir).rstrip()
        self.source_path = source_path
        self.source_full_path = os.path.join(source_path, file_build_path).rstrip()
        self.offset = offset
        self.target_name = f'{release_details.format_prefix()}-{self.name}'.rstrip()

    def get_manifest(self):
        return {"path": self.target_name, "offset": self.offset}

    def copy(self, target_folder) -> str:
        self.target_fullpath = os.path.join(target_folder, self.target_name)
        logger.debug(f'file {self.source_full_path} will be copied to {self.target_fullpath}')
        try:
            os.makedirs(target_folder, exist_ok=True)
            shutil.copyfile(self.source_full_path, self.target_fullpath, follow_symlinks=True)
        except Exception:
            print(f'::error::Error while copying {self.source_full_path} to {self.target_fullpath}')
            print(f'::error::Content of {os.path.dirname(self.source_full_path.rstrip())}:')
            print('\n::error::'.join(get_file_list(os.path.dirname(self.source_full_path.rstrip()))))
            raise
        return self.target_fullpath

    def get_attributes(self):
        return {
            'name': self.target_name,
            'offset': self.offset,
            'artifact_relpath': self.artifact_relpath
        }
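
# One GitHub release for a given platform/configuration: knows its download
# URLs, extracts the zipped build artifacts and produces the per-release
# manifest entries.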
class PlatformRelease():
    name: str
    description: str
    url: str = ''
    zipfile: str = ''
    tempfolder: str
    release_details: ReleaseDetails
    flash_parms = {}
    build_dir: str
    has_artifacts: bool
    branch: str
    assets: list
    bin_files: list
    name_prefix: str

    def get_manifest_name(self) -> str:
        return f'{self.name_prefix}-{self.release_details.format_prefix()}.json'

    def __init__(self, git_release, flash_parms, build_dir, branch, name_prefix) -> None:
        self.name = git_release.tag_name
        self.description = git_release.body
        self.assets = git_release['assets']
        self.has_artifacts = False
        self.name_prefix = name_prefix
        if len(self.assets) > 0:
            if self.has_asset_type():
                self.url = self.get_asset_from_extension().browser_download_url
            if self.has_asset_type('.zip'):
                self.zipfile = self.get_asset_from_extension(ext='.zip').browser_download_url
                self.has_artifacts = True
        self.release_details = ReleaseDetails(git_release.name)
        self.bin_files = list()
        self.flash_parms = flash_parms
        self.build_dir = build_dir
        self.branch = branch

    def process_files(self, outdir: str) -> list:
        parts = []
        for f in self.bin_files:
            f.copy(outdir)
            parts.append(f.get_manifest())
        return parts

    def get_asset_from_extension(self, ext='.bin'):
        for a in self.assets:
            filename = AttributeDict(a).name
            file_name, file_extension = os.path.splitext(filename)
            if file_extension == ext:
                return AttributeDict(a)
        return None

    def has_asset_type(self, ext='.bin') -> bool:
        return self.get_asset_from_extension(ext) is not None

    def platform(self):
        return self.release_details.get_full_platform()

    def get_zip_file(self):
        self.tempfolder = extract_files_from_archive(self.zipfile)
        logger.info(f'Artifacts for {self.name} extracted to {self.tempfolder}')
        try:
            for artifact in artifacts_formats:
                base_name = os.path.basename(artifact[0]).rstrip().lstrip()
                self.bin_files.append(BinFile(self.tempfolder, artifact[0], self.flash_parms[base_name], self.release_details, self.build_dir))
            self.has_artifacts = True
        except Exception:
            self.has_artifacts = False

    def cleanup(self):
        logger.info(f'removing {self.name} temp directory {self.tempfolder}')
        shutil.rmtree(self.tempfolder)

    def get_attributes(self):
        return {
            'name': self.name,
            'branch': self.branch,
            'description': self.description,
            'url': self.url,
            'zipfile': self.zipfile,
            'release_details': self.release_details.get_attributes(),
            'bin_files': [b.get_attributes() for b in self.bin_files],
            'manifest_name': self.get_manifest_name()
        }
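
# Collection of PlatformRelease objects grouped by platform, capped at
# 'maxcount' releases per platform and filtered to the current branch.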
class Releases():
    _dict: dict = collections.OrderedDict()
    maxcount: int = 0
    branch: str = ''
    repo: Repository = None
    manifest_name: str

    def __init__(self, branch: str, maxcount: int = 3) -> None:
        self.maxcount = maxcount
        self.branch = branch

    def count(self, value: PlatformRelease) -> int:
        content = self._dict.get(value.platform())
        if content is None:
            return 0
        return len(content)

    def get_platform(self, platform: str) -> list:
        return self._dict[platform]

    def get_platform_keys(self):
        return self._dict.keys()

    def get_all(self) -> list:
        result: list = []
        for platform in [self.get_platform(platform) for platform in self.get_platform_keys()]:
            for release in platform:
                result.append(release)
        return result

    def append(self, value: PlatformRelease):
        # optional processing here
        if self.count(value) == 0:
            self._dict[value.platform()] = []
        if self.should_add(value):
            logger.info(f'Adding release {value.name} to the list')
            self._dict[value.platform()].append(value)
        else:
            logger.info(f'Skipping release {value.name}')

    def get_attributes(self):
        res = []
        release: PlatformRelease
        for release in self.get_all():
            res.append(release.get_attributes())
        return res

    def get_minlen(self) -> int:
        return min([len(self.get_platform(p)) for p in self.get_platform_keys()])

    def got_all_packages(self) -> bool:
        return self.get_minlen() >= self.maxcount

    def should_add(self, release: PlatformRelease) -> bool:
        return self.count(release) <= self.maxcount

    def add_package(self, package: PlatformRelease, with_artifacts: bool = True):
        if self.branch != package.branch:
            logger.info(f'Skipping release {package.name} from branch {package.branch}')
        elif package.has_artifacts or not with_artifacts:
            self.append(package)

    @classmethod
    def get_last_commit(cls) -> Commit:
        if cls.repo is None:
            cls.get_repository(os.getcwd())
        return cls.repo[cls.repo.head.target]

    @classmethod
    def get_repository(cls, path: str = os.getcwd()) -> Repository:
        if cls.repo is None:
            try:
                logger.info(f'Opening repository from {path}')
                cls.repo = Repository(path=path)
            except GitError:
                print('::error::Error while trying to access the repository.')
                print(f'::error::Content of {path}:')
                print('\n::error::'.join(get_file_list(path)))
                raise
        return cls.repo

    @classmethod
    def resolve_commit(cls, repo: Repository, commit_id: str) -> Commit:
        commit: Commit
        reference: Reference
        commit, reference = repo.resolve_refish(commit_id)
        return commit

    @classmethod
    def get_release_branch(cls, repo: Repository, platform_release) -> str:
        match = [t for t in repo.branches.with_commit(platform_release.target_commitish)]
        no_origin = [t for t in match if 'origin' not in t]
        if len(no_origin) == 0 and len(match) > 0:
            return match[0].split('/')[1]
        elif len(no_origin) > 0:
            return no_origin[0]
        return ''

    @classmethod
    def get_flash_parms(cls, file_path):
        flash = parse_json(file_path)
        od: collections.OrderedDict = collections.OrderedDict()
        for z in flash['flash_files'].items():
            base_name: str = os.path.basename(z[1])
            od[base_name.rstrip().lstrip()] = literal_eval(z[0])
        return collections.OrderedDict(sorted(od.items()))

    @classmethod
    def get_releases(cls, flash_file_path, maxcount: int, name_prefix):
        repo = Releases.get_repository(os.getcwd())
        flash_parms = Releases.get_flash_parms(flash_file_path)
        packages: Releases = cls(branch=repo.head.shorthand, maxcount=maxcount)
        build_dir = os.path.dirname(flash_file_path)
        for page in range(1, 999):
            logger.debug(f'Getting releases page {page}')
            releases = get_github_data(repo, f'releases?per_page=50&page={page}')
            if len(releases) == 0:
                logger.debug(f'No more releases found for page {page}')
                break
            for release_entry in [AttributeDict(platform) for platform in releases]:
                packages.add_package(PlatformRelease(release_entry, flash_parms, build_dir, Releases.get_release_branch(repo, release_entry), name_prefix))
                if packages.got_all_packages():
                    break
            if packages.got_all_packages():
                break
        return packages

    # dict-style helpers; note that Releases does not implement item access,
    # so these are effectively unused in this script.
    def update(self, *args, **kwargs):
        if args:
            if len(args) > 1:
                raise TypeError("update expected at most 1 arguments, "
                                "got %d" % len(args))
            other = dict(args[0])
            for key in other:
                self[key] = other[key]
        for key in kwargs:
            self[key] = kwargs[key]

    def setdefault(self, key, value=None):
        if key not in self:
            self[key] = value
        return self[key]

def set_workdir(args):
    logger.info(f'setting work dir to: {args.cwd}')
    os.chdir(os.path.abspath(args.cwd))

def parse_json(filename: str):
    fname = os.path.abspath(filename)
    folder: str = os.path.abspath(os.path.dirname(filename))
    logger.info(f'Opening json file {fname} from {folder}')
    try:
        with open(fname) as f:
            content = f.read()
            logger.debug(f'Loading json\n{content}')
            return json.loads(content)
    except JSONDecodeError:
        print(f'::error::Error parsing {content}')
        raise
    except Exception:
        print(f'::error::Unable to parse flasher args json file. Content of {folder}:')
        print('\n::error::'.join(get_file_list(folder)))
        raise

def write_github_env(args):
    logger.info(f'Writing environment details to {args.env_file}...')
    with open(args.env_file, "w") as env_file:
        for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
            line = f'{attr}={getattr(github_env, attr)}'
            logger.info(line)
            env_file.write(f'{line}\n')
            os.environ[attr] = str(getattr(github_env, attr))
    logger.info(f'Done writing environment details to {args.env_file}!')


def set_workflow_output(args):
    logger.info('Outputting job variables ...')
    for attr in [attr for attr in dir(github_env) if not attr.startswith('_')]:
        # use print instead of logger, as we need the raw output without the date/time prefix from logging
        print(f'::set-output name={attr}::{getattr(github_env, attr)}')
        os.environ[attr] = str(getattr(github_env, attr))
    logger.info('Done outputting job variables!')

def format_commit(commit):
    # e.g. 463a9d8b7 Merge branch 'bugfix/ci_deploy_tags_v4.0' into 'release/v4.0' (2020-01-11T14:08:55+08:00)
    dt = datetime.fromtimestamp(float(commit.author.time), timezone(timedelta(minutes=commit.author.offset)))
    timestr = dt.strftime('%c%z')
    cmesg = commit.message.replace('\n', ' ')
    return f'{commit.short_id} {cmesg} ({timestr}) <{commit.author.name}>'.replace('  ', ' ')


def format_artifact_name(base_name: str = '', args=AttributeDict(os.environ)):
    return f'{base_name}{args.branch_name}-{args.node}-{args.depth}-{args.major}{args.build}'

def handle_build_flags(args):
    set_workdir(args)
    logger.info('Setting global build flags')
    last: Commit = Releases.get_last_commit()
    commit_message: str = last.message.replace('\n', ' ')
    github_env.mock = 1 if args.mock else 0
    github_env.release_flag = 1 if args.mock or args.force or 'release' in commit_message.lower() else 0
    github_env.ui_build = 1 if args.mock or args.ui_build or '[ui-build]' in commit_message.lower() or github_env.release_flag == 1 else 0
    set_workflow_output(github_env)

def handle_environment(args):
    set_workdir(args)
    logger.info('Setting environment variables...')
    last: Commit = Releases.get_last_commit()
    commit_message: str = last.message.replace('\n', ' ')
    github_env.author_name = last.author.name
    github_env.author_email = last.author.email
    github_env.committer_name = last.committer.name
    github_env.committer_email = last.committer.email
    github_env.node = args.node
    github_env.depth = args.depth
    github_env.major = args.major
    github_env.build = args.build
    github_env.DEPTH = args.depth
    github_env.TARGET_BUILD_NAME = args.node
    github_env.build_version_prefix = args.major
    github_env.branch_name = re.sub(r'[^a-zA-Z0-9\-~!@_\.]', '', Releases.get_repository().head.shorthand)
    github_env.BUILD_NUMBER = str(args.build)
    github_env.tag = f'{args.node}.{args.depth}.{args.build}.{github_env.branch_name}'.rstrip()
    github_env.last_commit = commit_message
    github_env.DOCKER_IMAGE_NAME = args.docker
    github_env.name = f"{args.major}.{str(args.build)}-{args.depth}#v4.3#{args.node}#{github_env.branch_name}"
    github_env.artifact_prefix = format_artifact_name('squeezelite-esp32-', github_env)
    github_env.artifact_file_name = f"{github_env.artifact_prefix}.zip"
    github_env.artifact_bin_file_name = f"{github_env.artifact_prefix}.bin"
    github_env.PROJECT_VER = f'{args.node}-{args.build}'
    github_env.description = ('### Revision Log<br><<~EOD\n'
                              + '<br>\n'.join(format_commit(c) for i, c in enumerate(Releases.get_repository().walk(last.id, pygit2.GIT_SORT_TIME)) if i < 10)
                              + '\n~EOD')
    write_github_env(args)

def handle_artifacts(args):
    set_workdir(args)
    logger.info('Handling artifacts')
    for attr in artifacts_formats:
        target: str = attr[1].replace(artifacts_formats_outdir, args.outdir).replace(artifacts_formats_prefix, format_artifact_name())
        logger.debug(f'file {attr[0]} will be copied to {target}')
        try:
            os.makedirs(os.path.dirname(target), exist_ok=True)
            shutil.copyfile(attr[0].rstrip(), target, follow_symlinks=True)
        except Exception:
            print(f'::error::Error while copying to {target}')
            print(f'::error::Content of {os.path.dirname(attr[0].rstrip())}:')
            print('\n::error::'.join(get_file_list(os.path.dirname(attr[0].rstrip()))))
            raise

def delete_folder(path):
    '''Recursively clear read-only flags, then delete files and folders.'''
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            fulldirpath = os.path.join(root, dir)
            logger.debug(f'Drilling down in {fulldirpath}')
            delete_folder(fulldirpath)
        for fname in files:
            full_path = os.path.join(root, fname)
            logger.debug(f'Setting file read/write {full_path}')
            os.chmod(full_path, stat.S_IWRITE)
            logger.debug(f'Deleting file {full_path}')
            os.remove(full_path)
    if os.path.exists(path):
        logger.debug(f'Changing folder read/write {path}')
        os.chmod(path, stat.S_IWRITE)
        logger.warning(f'Deleting Folder {path}')
        os.rmdir(path)

def get_file_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            outlist.append(f'Content of {os.path.join(root, dir)}')
            get_file_list(os.path.join(root, dir))
        for fname in files:
            full_name = os.path.join(root, fname)
            fstat: os.stat_result = pathlib.Path(full_name).stat()
            # Convert file size to MB, KB or Bytes
            if fstat.st_size > 1024 * 1024:
                fsize = math.ceil(fstat.st_size / (1024 * 1024))
                unit = "MB"
            elif fstat.st_size > 1024:
                fsize = math.ceil(fstat.st_size / 1024)
                unit = "KB"
            else:
                fsize = fstat.st_size
                unit = "B"
            mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
            outlist.append('\t{:15.80s}{:8d} {:2s} {:18s}'.format(fname, fsize, unit, mtime))
    if os.path.exists(path):
        outlist.append(path)
    outlist.sort()
    return outlist

def get_recursive_list(path) -> list:
    outlist: list = []
    for root, dirs, files in os.walk(path, topdown=True):
        for dir in dirs:
            get_file_list(os.path.join(root, dir))
        for fname in files:
            outlist.append(fname)
    # if os.path.exists(path):
    #     outlist.append(path)
    outlist.sort()
    return outlist

def handle_manifest(args):
    set_workdir(args)
    logger.info('Creating the web installer manifest')
    env = AttributeDict(os.environ)
    if not os.path.exists(os.path.dirname(args.outdir)):
        logger.info(f'Creating target folder {args.outdir}')
        os.makedirs(args.outdir, exist_ok=True)
    releases: Releases = Releases.get_releases(args.flash_file, args.max_count, args.manif_name)
    release: PlatformRelease
    for release in releases.get_all():
        release.get_zip_file()
        man = copy.deepcopy(manifest)
        man['manifest_name'] = release.get_manifest_name()
        man['builds'][0]['parts'] = release.process_files(args.outdir)
        man['name'] = release.platform()
        man['version'] = release.release_details.version
        logger.debug(f'Generated manifest: \n{json.dumps(man, indent=4)}')
        fullpath = os.path.join(args.outdir, release.get_manifest_name())
        logger.info(f'Writing manifest to {fullpath}')
        with open(fullpath, "w") as f:
            json.dump(man, f, indent=4)
        release.cleanup()
    mainmanifest = os.path.join(args.outdir, args.manif_name)
    logger.info(f'Writing main manifest {mainmanifest}')
    with open(mainmanifest, 'w') as f:
        json.dump(releases.get_attributes(), f, indent=4)
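
# Map every file name referenced by the main manifest to its release entry, so
# that anything in the web installer tree that is no longer referenced can be
# removed by update_files().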
def get_new_file_names(manifest: str, source: str) -> collections.OrderedDict:
    artifacts = parse_json(os.path.join(source, manifest))
    new_release_files: dict = collections.OrderedDict()
    for artifact in artifacts:
        for name in [f["name"] for f in artifact["bin_files"]]:
            new_release_files[name] = artifact
        new_release_files[artifact['manifest_name']] = artifact['name']
    return new_release_files

def copy_no_overwrite(source: str, target: str):
    sfiles = os.listdir(source)
    for f in sfiles:
        source_file = os.path.join(source, f)
        target_file = os.path.join(target, f)
        if not os.path.exists(target_file):
            logger.info(f'Copying {f} to target')
            shutil.copy(source_file, target_file)
        else:
            logger.debug(f'Skipping existing file {f}')

def get_changed_items(repo: Repository) -> Dict:
    changed_filemode_status_code: int = pygit2.GIT_FILEMODE_TREE
    original_status_dict: Dict[str, int] = repo.status()
    # transfer any non-filemode changes to a new dictionary
    status_dict: Dict[str, int] = {}
    for filename, code in original_status_dict.items():
        if code != changed_filemode_status_code:
            status_dict[filename] = code
    return status_dict


def is_dirty(repo: Repository) -> bool:
    return len(get_changed_items(repo)) > 0
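
# Commit and push the web installer changes. Author/committer identities come
# from the environment variables exported by handle_environment(), and the
# GitHub token is passed as HTTP basic-auth credentials.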
def push_if_change(repo: Repository, token: str):
    if is_dirty(repo):
        logger.info('Changes found. Preparing commit')
        env = AttributeDict(os.environ)
        index: Index = repo.index
        index.add_all()
        index.write()
        reference = repo.head.name
        author = Signature(env.author_name, env.author_email)
        committer = Signature(env.committer_name, env.committer_email)
        message = f'Web installer for {format_artifact_name()}'
        tree = index.write_tree()
        commit = repo.create_commit(reference, author, committer, message, tree, [repo.head.target])
        origin: Remote = repo.remotes['origin']
        logger.info(f'Pushing commit {format_commit(repo[commit])} to url {origin.url}')
        credentials = UserPass(token, 'x-oauth-basic')  # passing credentials
        remote: Remote = repo.remotes['origin']
        remote.credentials = credentials
        remote.push([reference], callbacks=RemoteCallbacks(credentials=credentials))
    else:
        logger.warning('No change found. Skipping update')

def update_files(target_artifacts: str, manif_name: str, source: str):
    new_list: dict = get_new_file_names(manif_name, os.path.abspath(source))
    if os.path.exists(target_artifacts):
        logger.info(f'Removing obsolete files from {target_artifacts}')
        for f in get_recursive_list(target_artifacts):
            if f not in new_list.keys():
                full_target = os.path.join(target_artifacts, f)
                logger.warning(f'Removing obsolete file {f}')
                os.remove(full_target)
    else:
        logger.info(f'Creating target folder {target_artifacts}')
        os.makedirs(target_artifacts, exist_ok=True)
    logger.info(f'Copying installer files to {target_artifacts}:')
    copy_no_overwrite(os.path.abspath(source), target_artifacts)

def handle_pushinstaller(args):
    set_workdir(args)
    logger.info('Pushing web installer updates... ')
    target_artifacts = os.path.join(args.target, args.artifacts)
    if os.path.exists(args.target):
        logger.info(f'Removing files (if any) from {args.target}')
        delete_folder(args.target)
    logger.info(f'Cloning from {args.url} into {args.target}')
    repo = pygit2.clone_repository(args.url, args.target)
    repo.checkout_head()
    update_files(target_artifacts, args.manif_name, args.source)
    push_if_change(repo, args.token)
    repo.state_cleanup()

def handle_show(args):
    logger.info('Show')


def extract_files_from_archive(url):
    tempfolder = tempfile.mkdtemp()
    platform = requests.get(url)
    z = zipfile.ZipFile(io.BytesIO(platform.content))
    z.extractall(tempfolder)
    return tempfolder
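
# Wire each subcommand to its handler; argparse stores the function in
# args.func, which main() invokes.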
parser_environment.set_defaults(func=handle_environment, cmd='environment')
parser_artifacts.set_defaults(func=handle_artifacts, cmd='artifacts')
parser_manifest.set_defaults(func=handle_manifest, cmd='manifest')
parser_pushinstaller.set_defaults(func=handle_pushinstaller, cmd='installer')
parser_show.set_defaults(func=handle_show, cmd='show')
parser_build_flags.set_defaults(func=handle_build_flags, cmd='build_flags')

def main():
    args = parser.parse_args()
    logger.setLevel(logging.getLevelName(args.loglevel))
    logger.info(f'build_tools version : {tool_version}')
    logger.debug(f'Processing command {args.command}')
    func: Callable = getattr(args, 'func', None)
    if func is not None:
        # Call whatever subcommand function was selected
        func(args)
    else:
        # No subcommand was provided, so call help
        parser.print_usage()


if __name__ == '__main__':
    main()