  1. #! /usr/bin/env python
  2. # encoding: utf-8
  3. """
  4. waf-powered distributed network builds, with a network cache.
  5. Caching files from a server has advantages over a NFS/Samba shared folder:
  6. - builds are much faster because they use local files
  7. - builds just continue to work in case of a network glitch
  8. - permissions are much simpler to manage
  9. """
  10. import os, urllib, tarfile, re, shutil, tempfile, sys
  11. from collections import OrderedDict
  12. from waflib import Context, Utils, Logs
  13. try:
  14. from urllib.parse import urlencode
  15. except ImportError:
  16. urlencode = urllib.urlencode
  17. def safe_urlencode(data):
  18. x = urlencode(data)
  19. try:
  20. x = x.encode('utf-8')
  21. except Exception:
  22. pass
  23. return x
  24. try:
  25. from urllib.error import URLError
  26. except ImportError:
  27. from urllib2 import URLError
  28. try:
  29. from urllib.request import Request, urlopen
  30. except ImportError:
  31. from urllib2 import Request, urlopen
  32. DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
  33. DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
  34. TARFORMAT = 'w:bz2'
  35. TIMEOUT = 60
  36. REQUIRES = 'requires.txt'
  37. re_com = re.compile('\s*#.*', re.M)
  38. def total_version_order(num):
  39. lst = num.split('.')
  40. template = '%10s' * len(lst)
  41. ret = template % tuple(lst)
  42. return ret
  43. def get_distnet_cache():
  44. return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
  45. def get_server_url():
  46. return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
  47. def get_download_url():
  48. return '%s/download.py' % get_server_url()
  49. def get_upload_url():
  50. return '%s/upload.py' % get_server_url()
  51. def get_resolve_url():
  52. return '%s/resolve.py' % get_server_url()
  53. def send_package_name():
  54. out = getattr(Context.g_module, 'out', 'build')
  55. pkgfile = '%s/package_to_upload.tarfile' % out
  56. return pkgfile
  57. class package(Context.Context):
  58. fun = 'package'
  59. cmd = 'package'
  60. def execute(self):
  61. try:
  62. files = self.files
  63. except AttributeError:
  64. files = self.files = []
  65. Context.Context.execute(self)
  66. pkgfile = send_package_name()
  67. if not pkgfile in files:
  68. if not REQUIRES in files:
  69. files.append(REQUIRES)
  70. self.make_tarfile(pkgfile, files, add_to_package=False)
  71. def make_tarfile(self, filename, files, **kw):
  72. if kw.get('add_to_package', True):
  73. self.files.append(filename)
  74. with tarfile.open(filename, TARFORMAT) as tar:
  75. endname = os.path.split(filename)[-1]
  76. endname = endname.split('.')[0] + '/'
  77. for x in files:
  78. tarinfo = tar.gettarinfo(x, x)
  79. tarinfo.uid = tarinfo.gid = 0
  80. tarinfo.uname = tarinfo.gname = 'root'
  81. tarinfo.size = os.stat(x).st_size
  82. # TODO - more archive creation options?
  83. if kw.get('bare', True):
  84. tarinfo.name = os.path.split(x)[1]
  85. else:
  86. tarinfo.name = endname + x # todo, if tuple, then..
  87. Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
  88. with open(x, 'rb') as f:
  89. tar.addfile(tarinfo, f)
  90. Logs.info('Created %s', filename)
  91. class publish(Context.Context):
  92. fun = 'publish'
  93. cmd = 'publish'
  94. def execute(self):
  95. if hasattr(Context.g_module, 'publish'):
  96. Context.Context.execute(self)
  97. mod = Context.g_module
  98. rfile = getattr(self, 'rfile', send_package_name())
  99. if not os.path.isfile(rfile):
  100. self.fatal('Create the release file with "waf release" first! %r' % rfile)
  101. fdata = Utils.readf(rfile, m='rb')
  102. data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
  103. req = Request(get_upload_url(), data)
  104. response = urlopen(req, timeout=TIMEOUT)
  105. data = response.read().strip()
  106. if sys.hexversion>0x300000f:
  107. data = data.decode('utf-8')
  108. if data != 'ok':
  109. self.fatal('Could not publish the package %r' % data)
  110. class constraint(object):
  111. def __init__(self, line=''):
  112. self.required_line = line
  113. self.info = []
  114. line = line.strip()
  115. if not line:
  116. return
  117. lst = line.split(',')
  118. if lst:
  119. self.pkgname = lst[0]
  120. self.required_version = lst[1]
  121. for k in lst:
  122. a, b, c = k.partition('=')
  123. if a and c:
  124. self.info.append((a, c))
  125. def __str__(self):
  126. buf = []
  127. buf.append(self.pkgname)
  128. buf.append(self.required_version)
  129. for k in self.info:
  130. buf.append('%s=%s' % k)
  131. return ','.join(buf)
  132. def __repr__(self):
  133. return "requires %s-%s" % (self.pkgname, self.required_version)
  134. def human_display(self, pkgname, pkgver):
  135. return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
  136. def why(self):
  137. ret = []
  138. for x in self.info:
  139. if x[0] == 'reason':
  140. ret.append(x[1])
  141. return ret
  142. def add_reason(self, reason):
  143. self.info.append(('reason', reason))
  144. def parse_constraints(text):
  145. assert(text is not None)
  146. constraints = []
  147. text = re.sub(re_com, '', text)
  148. lines = text.splitlines()
  149. for line in lines:
  150. line = line.strip()
  151. if not line:
  152. continue
  153. constraints.append(constraint(line))
  154. return constraints
  155. def list_package_versions(cachedir, pkgname):
  156. pkgdir = os.path.join(cachedir, pkgname)
  157. try:
  158. versions = os.listdir(pkgdir)
  159. except OSError:
  160. return []
  161. versions.sort(key=total_version_order)
  162. versions.reverse()
  163. return versions
  164. class package_reader(Context.Context):
  165. cmd = 'solver'
  166. fun = 'solver'
  167. def __init__(self, **kw):
  168. Context.Context.__init__(self, **kw)
  169. self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
  170. self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
  171. self.cache_constraints = {}
  172. self.constraints = []
  173. def compute_dependencies(self, filename=REQUIRES):
  174. text = Utils.readf(filename)
  175. data = safe_urlencode([('text', text)])
  176. if '--offline' in sys.argv:
  177. self.constraints = self.local_resolve(text)
  178. else:
  179. req = Request(get_resolve_url(), data)
  180. try:
  181. response = urlopen(req, timeout=TIMEOUT)
  182. except URLError as e:
  183. Logs.warn('The package server is down! %r', e)
  184. self.constraints = self.local_resolve(text)
  185. else:
  186. ret = response.read()
  187. try:
  188. ret = ret.decode('utf-8')
  189. except Exception:
  190. pass
  191. self.trace(ret)
  192. self.constraints = parse_constraints(ret)
  193. self.check_errors()
  194. def check_errors(self):
  195. errors = False
  196. for c in self.constraints:
  197. if not c.required_version:
  198. errors = True
  199. reasons = c.why()
  200. if len(reasons) == 1:
  201. Logs.error('%s but no matching package could be found in this repository', reasons[0])
  202. else:
  203. Logs.error('Conflicts on package %r:', c.pkgname)
  204. for r in reasons:
  205. Logs.error(' %s', r)
  206. if errors:
  207. self.fatal('The package requirements cannot be satisfied!')
  208. def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
  209. try:
  210. return self.cache_constraints[(pkgname, pkgver)]
  211. except KeyError:
  212. text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
  213. ret = parse_constraints(text)
  214. self.cache_constraints[(pkgname, pkgver)] = ret
  215. return ret
  216. def apply_constraint(self, domain, constraint):
  217. vname = constraint.required_version.replace('*', '.*')
  218. rev = re.compile(vname, re.M)
  219. ret = [x for x in domain if rev.match(x)]
  220. return ret
  221. def trace(self, *k):
  222. if getattr(self, 'debug', None):
  223. Logs.error(*k)
  224. def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
  225. # breadth first search
  226. n_packages_to_versions = dict(packages_to_versions)
  227. n_packages_to_constraints = dict(packages_to_constraints)
  228. self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
  229. done = done + [pkgname]
  230. constraints = self.load_constraints(pkgname, pkgver)
  231. self.trace("constraints %r" % constraints)
  232. for k in constraints:
  233. try:
  234. domain = n_packages_to_versions[k.pkgname]
  235. except KeyError:
  236. domain = list_package_versions(get_distnet_cache(), k.pkgname)
  237. self.trace("constraints?")
  238. if not k.pkgname in done:
  239. todo = todo + [k.pkgname]
  240. self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
  241. # apply the constraint
  242. domain = self.apply_constraint(domain, k)
  243. self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
  244. n_packages_to_versions[k.pkgname] = domain
  245. # then store the constraint applied
  246. constraints = list(packages_to_constraints.get(k.pkgname, []))
  247. constraints.append((pkgname, pkgver, k))
  248. n_packages_to_constraints[k.pkgname] = constraints
  249. if not domain:
  250. self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
  251. return (n_packages_to_versions, n_packages_to_constraints)
  252. # next package on the todo list
  253. if not todo:
  254. return (n_packages_to_versions, n_packages_to_constraints)
  255. n_pkgname = todo[0]
  256. n_pkgver = n_packages_to_versions[n_pkgname][0]
  257. tmp = dict(n_packages_to_versions)
  258. tmp[n_pkgname] = [n_pkgver]
  259. self.trace("fixed point %s" % n_pkgname)
  260. return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
  261. def get_results(self):
  262. return '\n'.join([str(c) for c in self.constraints])
  263. def solution_to_constraints(self, versions, constraints):
  264. solution = []
  265. for p in versions:
  266. c = constraint()
  267. solution.append(c)
  268. c.pkgname = p
  269. if versions[p]:
  270. c.required_version = versions[p][0]
  271. else:
  272. c.required_version = ''
  273. for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
  274. c.add_reason(c2.human_display(from_pkgname, from_pkgver))
  275. return solution
  276. def local_resolve(self, text):
  277. self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
  278. p2v = OrderedDict({self.myproject: [self.myversion]})
  279. (versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
  280. return self.solution_to_constraints(versions, constraints)
  281. def download_to_file(self, pkgname, pkgver, subdir, tmp):
  282. data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
  283. req = urlopen(get_download_url(), data, timeout=TIMEOUT)
  284. with open(tmp, 'wb') as f:
  285. while True:
  286. buf = req.read(8192)
  287. if not buf:
  288. break
  289. f.write(buf)
  290. def extract_tar(self, subdir, pkgdir, tmpfile):
  291. with tarfile.open(tmpfile) as f:
  292. temp = tempfile.mkdtemp(dir=pkgdir)
  293. try:
  294. f.extractall(temp)
  295. os.rename(temp, os.path.join(pkgdir, subdir))
  296. finally:
  297. try:
  298. shutil.rmtree(temp)
  299. except Exception:
  300. pass
  301. def get_pkg_dir(self, pkgname, pkgver, subdir):
  302. pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
  303. if not os.path.isdir(pkgdir):
  304. os.makedirs(pkgdir)
  305. target = os.path.join(pkgdir, subdir)
  306. if os.path.exists(target):
  307. return target
  308. (fd, tmp) = tempfile.mkstemp(dir=pkgdir)
  309. try:
  310. os.close(fd)
  311. self.download_to_file(pkgname, pkgver, subdir, tmp)
  312. if subdir == REQUIRES:
  313. os.rename(tmp, target)
  314. else:
  315. self.extract_tar(subdir, pkgdir, tmp)
  316. finally:
  317. try:
  318. os.remove(tmp)
  319. except OSError:
  320. pass
  321. return target
  322. def __iter__(self):
  323. if not self.constraints:
  324. self.compute_dependencies()
  325. for x in self.constraints:
  326. if x.pkgname == self.myproject:
  327. continue
  328. yield x
  329. def execute(self):
  330. self.compute_dependencies()
  331. packages = package_reader()
  332. def load_tools(ctx, extra):
  333. global packages
  334. for c in packages:
  335. packages.get_pkg_dir(c.pkgname, c.required_version, extra)
  336. noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
  337. for x in os.listdir(noarchdir):
  338. if x.startswith('waf_') and x.endswith('.py'):
  339. ctx.load([x.rstrip('.py')], tooldir=[noarchdir])
  340. def options(opt):
  341. opt.add_option('--offline', action='store_true')
  342. packages.execute()
  343. load_tools(opt, REQUIRES)
  344. def configure(conf):
  345. load_tools(conf, conf.variant)
  346. def build(bld):
  347. load_tools(bld, bld.variant)