utils.py 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448
  1. #!/usr/bin/env python
  2. # vim:set et ts=4 sw=4:
  3. """Utility functions
  4. @contact: Debian FTP Master <ftpmaster@debian.org>
  5. @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
  6. @license: GNU General Public License version 2 or later
  7. """
  8. # This program is free software; you can redistribute it and/or modify
  9. # it under the terms of the GNU General Public License as published by
  10. # the Free Software Foundation; either version 2 of the License, or
  11. # (at your option) any later version.
  12. # This program is distributed in the hope that it will be useful,
  13. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. # GNU General Public License for more details.
  16. # You should have received a copy of the GNU General Public License
  17. # along with this program; if not, write to the Free Software
  18. # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  19. from __future__ import absolute_import, print_function
  20. import commands
  21. import codecs
  22. import datetime
  23. import os
  24. import pwd
  25. import grp
  26. import socket
  27. import shutil
  28. import sqlalchemy.sql as sql
  29. import sys
  30. import tempfile
  31. import apt_inst
  32. import apt_pkg
  33. import re
  34. import email as modemail
  35. import subprocess
  36. import ldap
  37. import errno
  38. import functools
  39. import daklib.config as config
  40. import daklib.daksubprocess
  41. from .dbconn import DBConn, get_architecture, get_component, get_suite, \
  42. get_active_keyring_paths, \
  43. get_suite_architectures, get_or_set_metadatakey, \
  44. Component, Override, OverrideType
  45. from .dak_exceptions import *
  46. from .gpg import SignedFile
  47. from .textutils import fix_maintainer
  48. from .regexes import re_html_escaping, html_escaping, re_single_line_field, \
  49. re_multi_line_field, re_srchasver, \
  50. re_re_mark, re_whitespace_comment, re_issource, \
  51. re_build_dep_arch, re_parse_maintainer
  52. from .formats import parse_format, validate_changes_format
  53. from .srcformats import get_format_from_string
  54. from collections import defaultdict
  55. ################################################################################
# Module-level configuration defaults and caches shared by the helpers below.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
  59. # Monkeypatch commands.getstatusoutput as it may not return the correct exit
  60. # code in lenny's Python. This also affects commands.getoutput and
  61. # commands.getstatus.
  62. def dak_getstatusoutput(cmd):
  63. pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
  64. stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  65. output = pipe.stdout.read()
  66. pipe.wait()
  67. if output[-1:] == '\n':
  68. output = output[:-1]
  69. ret = pipe.wait()
  70. if ret is None:
  71. ret = 0
  72. return ret, output
  73. commands.getstatusoutput = dak_getstatusoutput
  74. ################################################################################
  75. def html_escape(s):
  76. """ Escape html chars """
  77. return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
  78. ################################################################################
  79. def open_file(filename, mode='r'):
  80. """
  81. Open C{file}, return fileobject.
  82. @type filename: string
  83. @param filename: path/filename to open
  84. @type mode: string
  85. @param mode: open mode
  86. @rtype: fileobject
  87. @return: open fileobject
  88. @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
  89. """
  90. try:
  91. f = open(filename, mode)
  92. except IOError:
  93. raise CantOpenError(filename)
  94. return f
  95. ################################################################################
def our_raw_input(prompt=""):
    # Print *prompt* on stdout and return one line read from stdin.
    # Exits the whole program cleanly on EOF (^D).
    if prompt:
        # Retry the prompt write: sys.stdout.write can raise IOError
        # (e.g. interrupted system call); loop until it goes through.
        while 1:
            try:
                sys.stdout.write(prompt)
                break
            except IOError:
                pass
    sys.stdout.flush()
    try:
        # NOTE: raw_input is Python-2-only (input() in Python 3).
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
  111. ################################################################################
  112. def extract_component_from_section(section):
  113. component = ""
  114. if section.find('/') != -1:
  115. component = section.split('/')[0]
  116. # Expand default component
  117. if component == "":
  118. component = "main"
  119. return (section, component)
  120. ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    """Parse deb822-style control data into a field -> value dict.

    @type armored_contents: string
    @param armored_contents: the (possibly PGP-armored) control data
    @type signing_rules: int
    @param signing_rules: 1 enables the strict dpkg-source blank-line rules
    @type keyrings: list or None
    @param keyrings: keyrings used for signature verification; C{None}
                     disables the signature requirement
    @param session: unused  (NOTE(review): accepted but never referenced)
    @rtype: dict
    @return: parsed fields (lower-cased keys) plus "filecontents"
    @raise ParseChangesError: empty input or unparseable lines
    @raise InvalidDscError: strict mode only: blank line before the end
    """
    require_signature = True
    if keyrings is None:
        keyrings = []
        require_signature = False
    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents
    error = ""
    changes = {}
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")
    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]  # strip the kept linebreak
    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1  # -1: no field yet; 1: just saw a field start; 0: in continuation
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        # Strict mode: a blank line is only legal as the very last line.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        # " ." marks an intentionally blank continuation line.
        if line == " .":
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        # Anything unrecognised is accumulated and reported at the end.
        error += line
    changes["filecontents"] = armored_contents
    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)
    if error:
        raise ParseChangesError(error)
    return changes
  181. ################################################################################
  182. def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
  183. """
  184. Parses a changes file and returns a dictionary where each field is a
  185. key. The mandatory first argument is the filename of the .changes
  186. file.
  187. signing_rules is an optional argument:
  188. - If signing_rules == -1, no signature is required.
  189. - If signing_rules == 0 (the default), a signature is required.
  190. - If signing_rules == 1, it turns on the same strict format checking
  191. as dpkg-source.
  192. The rules for (signing_rules == 1)-mode are:
  193. - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
  194. followed by any PGP header data and must end with a blank line.
  195. - The data section must end with a blank line and must be followed by
  196. "-----BEGIN PGP SIGNATURE-----".
  197. """
  198. with open_file(filename) as changes_in:
  199. content = changes_in.read()
  200. try:
  201. unicode(content, 'utf-8')
  202. except UnicodeError:
  203. raise ChangesUnicodeError("Changes file not proper utf-8")
  204. changes = parse_deb822(content, signing_rules, keyrings=keyrings)
  205. if not dsc_file:
  206. # Finally ensure that everything needed for .changes is there
  207. must_keywords = ('Format', 'Date', 'Source', 'Architecture', 'Version',
  208. 'Distribution', 'Maintainer', 'Changes', 'Files')
  209. missingfields = []
  210. for keyword in must_keywords:
  211. if keyword.lower() not in changes:
  212. missingfields.append(keyword)
  213. if len(missingfields):
  214. raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
  215. return changes
  216. ################################################################################
  217. def hash_key(hashname):
  218. return '%ssum' % hashname
  219. ################################################################################
def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.
    @type dsc_filename: string
    @param dsc_filename: path of .dsc file
    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}
    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}
    @rtype: list
    @return: all errors detected
    """
    rejmsg = []
    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)
    # Suffix regex -> counter keys.  Order matters: the first matching
    # pattern wins, so the more specific ones (.asc signatures, plain
    # orig.tar.gz) come before the generic tarball patterns.
    ftype_lookup = (
        (r'orig\.tar\.(gz|bz2|xz)\.asc', ('orig_tar_sig',)),
        (r'orig\.tar\.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff\.gz', ('debian_diff',)),
        (r'tar\.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)\.asc', ('more_orig_tar_sig',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )
    for f in dsc_files:
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue
        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break
        # File does not match anything in lookup table; reject
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
            # NOTE(review): break (not continue) — scanning stops at the
            # first unexpected file, so later files are not checked.
            break
    # Check for multiple files
    for file_type in ('orig_tar', 'orig_tar_sig', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass
    return rejmsg
  280. ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the file list from changes[field] into a per-file dict.

    @type changes: dict
    @param changes: parsed changes/dsc data (from C{parse_changes()})
    @type is_a_dsc: int
    @param is_a_dsc: true when parsing a .dsc (no section/priority columns)
    @type field: string
    @param field: which field to parse ("files", "checksums-sha1", ...)
    @type hashname: string
    @param hashname: key under which the per-file checksum is stored
    @rtype: dict
    @return: name -> {size, section, priority, component, <hashname>}
    @raise NoFilesFieldError: changes has no such field
    @raise ParseChangesError: a line has the wrong number of tokens
    """
    files = {}
    # Make sure we have a Files: field to parse...
    if field not in changes:
        raise NoFilesFieldError
    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes['format']), field)
    # Only the "files" field of a .changes carries section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"
    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            # NOTE(review): an empty line stops parsing entirely (break,
            # not continue) — entries after a blank line are ignored.
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)
        if section == "":
            section = "-"
        if priority == "":
            priority = "-"
        (section, component) = extract_component_from_section(section)
        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
    return files
  313. ################################################################################
def send_mail(message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type message: string
    @param message: full message text; when non-empty it is written to a
                    temporary file which replaces *filename*
    @type filename: string
    @param filename: path of a pre-built message file (used when *message*
                     is empty)
    @type whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.
    @raise SendmailFailedError: the sendmail command exited non-zero
    """
    # Archive a copy of every outgoing mail in Dir::Mail, if configured.
    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            print(message, end=' ', file=fh)
    # Check whether we're supposed to be sending mail
    if "Dinstall::Options::No-Mail" in Cnf and Cnf["Dinstall::Options::No-Mail"]:
        return
    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)
    # A None entry disables whitelisting altogether.
    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)
        # Compile the whitelist: lines starting with the RE mark are taken
        # as regexes, everything else is matched literally.
        whitelist = []
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        else:
                            whitelist.append(re.compile(re.escape(line.strip())))
        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = message_raw.get(field, None)
            if value is not None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print("Skipping {0} since it's not whitelisted".format(item))
                        continue
                    match.append(item)
                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))
        # Change message fields in order if we don't have a To header
        if "To" not in message_raw:
            fields.reverse()
            for field in fields:
                if field in message_raw:
                    # Promote the first surviving header to To
                    # (fields[-1] is "To" after the reverse()).
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return
        # Write the filtered message back over the file sendmail will read.
        # NOTE(review): O_EXCL without O_CREAT has no effect, and the file
        # is not truncated before the rewrite — confirm intended.
        fd = os.open(filename, os.O_RDWR | os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)
    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)
    # Clean up any temporary files
    if message:
        os.unlink(filename)
  400. ################################################################################
  401. def poolify(source):
  402. if source[:3] == "lib":
  403. return source[:4] + '/' + source + '/'
  404. else:
  405. return source[:1] + '/' + source + '/'
  406. ################################################################################
  407. def move(src, dest, overwrite=0, perms=0o664):
  408. if os.path.exists(dest) and os.path.isdir(dest):
  409. dest_dir = dest
  410. else:
  411. dest_dir = os.path.dirname(dest)
  412. if not os.path.lexists(dest_dir):
  413. umask = os.umask(00000)
  414. os.makedirs(dest_dir, 0o2775)
  415. os.umask(umask)
  416. #print "Moving %s to %s..." % (src, dest)
  417. if os.path.exists(dest) and os.path.isdir(dest):
  418. dest += '/' + os.path.basename(src)
  419. # Don't overwrite unless forced to
  420. if os.path.lexists(dest):
  421. if not overwrite:
  422. fubar("Can't move %s to %s - file already exists." % (src, dest))
  423. else:
  424. if not os.access(dest, os.W_OK):
  425. fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
  426. shutil.copy2(src, dest)
  427. os.chmod(dest, perms)
  428. os.unlink(src)
  429. def copy(src, dest, overwrite=0, perms=0o664):
  430. if os.path.exists(dest) and os.path.isdir(dest):
  431. dest_dir = dest
  432. else:
  433. dest_dir = os.path.dirname(dest)
  434. if not os.path.exists(dest_dir):
  435. umask = os.umask(00000)
  436. os.makedirs(dest_dir, 0o2775)
  437. os.umask(umask)
  438. #print "Copying %s to %s..." % (src, dest)
  439. if os.path.exists(dest) and os.path.isdir(dest):
  440. dest += '/' + os.path.basename(src)
  441. # Don't overwrite unless forced to
  442. if os.path.lexists(dest):
  443. if not overwrite:
  444. raise FileExistsError
  445. else:
  446. if not os.access(dest, os.W_OK):
  447. raise CantOverwriteError
  448. shutil.copy2(src, dest)
  449. os.chmod(dest, perms)
  450. ################################################################################
def which_conf_file():
    """Return the dak configuration file path to use.

    Precedence: the DAK_CONFIG environment variable, then (optionally) a
    merged local config, then a host-specific Config::<fqdn>::DakConfig
    entry, and finally the compiled-in default_config.
    """
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards *homedir* because the second
        # component is absolute — this always yields "/etc/dak.conf".
        # Probably meant "etc/dak.conf" relative to $HOME; confirm before
        # changing.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # Merge the local file into the in-memory Cnf, then fall through.
            apt_pkg.read_config_file_isc(Cnf, confpath)
    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]
    return default_config
  466. ################################################################################
  467. def TemplateSubst(subst_map, filename):
  468. """ Perform a substition of template """
  469. with open_file(filename) as templatefile:
  470. template = templatefile.read()
  471. for k, v in subst_map.iteritems():
  472. template = template.replace(k, str(v))
  473. return template
  474. ################################################################################
  475. def fubar(msg, exit_code=1):
  476. sys.stderr.write("E: %s\n" % (msg))
  477. sys.exit(exit_code)
  478. def warn(msg):
  479. sys.stderr.write("W: %s\n" % (msg))
  480. ################################################################################
  481. # Returns the user name with a laughable attempt at rfc822 conformancy
  482. # (read: removing stray periods).
  483. def whoami():
  484. return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
  485. def getusername():
  486. return pwd.getpwuid(os.getuid())[0]
  487. ################################################################################
  488. def size_type(c):
  489. t = " B"
  490. if c > 10240:
  491. c = c / 1024
  492. t = " KB"
  493. if c > 10240:
  494. c = c / 1024
  495. t = " MB"
  496. return ("%d%s" % (c, t))
  497. ################################################################################
  498. def find_next_free(dest, too_many=100):
  499. extra = 0
  500. orig_dest = dest
  501. while os.path.lexists(dest) and extra < too_many:
  502. dest = orig_dest + '.' + repr(extra)
  503. extra += 1
  504. if extra >= too_many:
  505. raise NoFreeFilenameError
  506. return dest
  507. ################################################################################
  508. def result_join(original, sep='\t'):
  509. resultlist = []
  510. for i in xrange(len(original)):
  511. if original[i] is None:
  512. resultlist.append("")
  513. else:
  514. resultlist.append(original[i])
  515. return sep.join(resultlist)
  516. ################################################################################
  517. def prefix_multi_line_string(str, prefix, include_blank_lines=0):
  518. out = ""
  519. for line in str.split('\n'):
  520. line = line.strip()
  521. if line or include_blank_lines:
  522. out += "%s%s\n" % (prefix, line)
  523. # Strip trailing new line
  524. if out:
  525. out = out[:-1]
  526. return out
  527. ################################################################################
  528. def join_with_commas_and(list):
  529. if len(list) == 0:
  530. return "nothing"
  531. if len(list) == 1:
  532. return list[0]
  533. return ", ".join(list[:-1]) + " and " + list[-1]
  534. ################################################################################
  535. def pp_deps(deps):
  536. pp_deps = []
  537. for atom in deps:
  538. (pkg, version, constraint) = atom
  539. if constraint:
  540. pp_dep = "%s (%s %s)" % (pkg, constraint, version)
  541. else:
  542. pp_dep = pkg
  543. pp_deps.append(pp_dep)
  544. return " |".join(pp_deps)
  545. ################################################################################
def get_conf():
    """Return the module-global Cnf configuration object."""
    return Cnf
  548. ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints

    Returns the tuple (con_suites, con_architectures, con_components,
    check_source).  The SQL fragments only ever interpolate integer ids
    looked up from the database, never raw user input.
    """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()
    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([str(i) for i in suite_ids_list])
        else:
            # No recognised suite at all is fatal.
            fubar("No valid suite given.")
    else:
        con_suites = ""
    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([str(i) for i in component_ids_list])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""
    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                # "source" is not a real architecture; flag it separately.
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([str(i) for i in arch_ids_list])
        else:
            if not check_source:
                fubar("No valid architecture given.")
            else:
                check_source = 1
    return (con_suites, con_architectures, con_components, check_source)
  607. ################################################################################
  608. @functools.total_ordering
  609. class ArchKey(object):
  610. """
  611. Key object for use in sorting lists of architectures.
  612. Sorts normally except that 'source' dominates all others.
  613. """
  614. __slots__ = ['arch', 'issource']
  615. def __init__(self, arch, *args):
  616. self.arch = arch
  617. self.issource = arch == 'source'
  618. def __lt__(self, other):
  619. if self.issource:
  620. return not other.issource
  621. if other.issource:
  622. return False
  623. return self.arch < other.arch
  624. def __eq__(self, other):
  625. return self.arch == other.arch
  626. ################################################################################
  627. def split_args(s, dwim=True):
  628. """
  629. Split command line arguments which can be separated by either commas
  630. or whitespace. If dwim is set, it will complain about string ending
  631. in comma since this usually means someone did 'dak ls -a i386, m68k
  632. foo' or something and the inevitable confusion resulting from 'm68k'
  633. being treated as an argument is undesirable.
  634. """
  635. if s.find(",") == -1:
  636. return s.split()
  637. else:
  638. if s[-1:] == "," and dwim:
  639. fubar("split_args: found trailing comma, spurious space maybe?")
  640. return s.split(",")
  641. ################################################################################
  642. def gpg_keyring_args(keyrings=None):
  643. if not keyrings:
  644. keyrings = get_active_keyring_paths()
  645. return " ".join(["--keyring %s" % x for x in keyrings])
  646. ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retreive email addresses from gpg key uids for a given fingerprint

    Results are memoised in key_uid_email_cache.  On any gpg failure an
    empty list is cached and returned.  @debian.org addresses are moved
    to the front of the list.
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = list()
    try:
        with open(os.devnull, "wb") as devnull:
            output = daklib.daksubprocess.check_output(
                ["gpg", "--no-default-keyring"] + gpg_keyring_args().split()
                + ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
    except subprocess.CalledProcessError:
        pass
    else:
        # Parse gpg's colon-separated listing: field 0 is the record type,
        # field 1 the validity, field 9 the user-id text.
        for l in output.split('\n'):
            parts = l.split(':')
            if parts[0] not in ("uid", "pub"):
                continue
            if parts[1] in ("i", "d", "r"):
                # Skip uid that is invalid, disabled or revoked
                continue
            try:
                uid = parts[9]
            except IndexError:
                continue
            try:
                # Do not use unicode_escape, because it is locale-specific
                uid = codecs.decode(uid, "string_escape").decode("utf-8")
            except UnicodeDecodeError:
                uid = uid.decode("latin1") # does not fail
            m = re_parse_maintainer.match(uid)
            if not m:
                continue
            address = m.group(2)
            address = address.encode("utf8") # dak still uses bytes
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(address)
    key_uid_email_cache[fingerprint] = addresses
    return addresses
  690. ################################################################################
  691. def get_logins_from_ldap(fingerprint='*'):
  692. """retrieve login from LDAP linked to a given fingerprint"""
  693. LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
  694. LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
  695. l = ldap.open(LDAPServer)
  696. l.simple_bind_s('', '')
  697. Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
  698. '(keyfingerprint=%s)' % fingerprint,
  699. ['uid', 'keyfingerprint'])
  700. login = {}
  701. for elem in Attrs:
  702. login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
  703. return login
  704. ################################################################################
  705. def get_users_from_ldap():
  706. """retrieve login and user names from LDAP"""
  707. LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
  708. LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
  709. l = ldap.open(LDAPServer)
  710. l.simple_bind_s('', '')
  711. Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
  712. '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
  713. users = {}
  714. for elem in Attrs:
  715. elem = elem[1]
  716. name = []
  717. for k in ('cn', 'mn', 'sn'):
  718. try:
  719. if elem[k][0] != '-':
  720. name.append(elem[k][0])
  721. except KeyError:
  722. pass
  723. users[' '.join(name)] = elem['uid'][0]
  724. return users
  725. ################################################################################
  726. def clean_symlink(src, dest, root):
  727. """
  728. Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
  729. Returns fixed 'src'
  730. """
  731. src = src.replace(root, '', 1)
  732. dest = dest.replace(root, '', 1)
  733. dest = os.path.dirname(dest)
  734. new_src = '../' * len(dest.split('/'))
  735. return new_src + src
  736. ################################################################################
  737. def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
  738. """
  739. Return a secure and unique filename by pre-creating it.
  740. @type directory: str
  741. @param directory: If non-null it will be the directory the file is pre-created in.
  742. @type prefix: str
  743. @param prefix: The filename will be prefixed with this string
  744. @type suffix: str
  745. @param suffix: The filename will end with this string
  746. @type mode: str
  747. @param mode: If set the file will get chmodded to those permissions
  748. @type group: str
  749. @param group: If set the file will get chgrped to the specified group.
  750. @rtype: list
  751. @return: Returns a pair (fd, name)
  752. """
  753. (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
  754. if mode:
  755. os.chmod(tfname, mode)
  756. if group:
  757. gid = grp.getgrnam(group).gr_gid
  758. os.chown(tfname, -1, gid)
  759. return (tfd, tfname)
  760. ################################################################################
  761. def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
  762. """
  763. Return a secure and unique directory by pre-creating it.
  764. @type parent: str
  765. @param parent: If non-null it will be the directory the directory is pre-created in.
  766. @type prefix: str
  767. @param prefix: The filename will be prefixed with this string
  768. @type suffix: str
  769. @param suffix: The filename will end with this string
  770. @type mode: str
  771. @param mode: If set the file will get chmodded to those permissions
  772. @type group: str
  773. @param group: If set the file will get chgrped to the specified group.
  774. @rtype: list
  775. @return: Returns a pair (fd, name)
  776. """
  777. tfname = tempfile.mkdtemp(suffix, prefix, parent)
  778. if mode:
  779. os.chmod(tfname, mode)
  780. if group:
  781. gid = grp.getgrnam(group).gr_gid
  782. os.chown(tfname, -1, gid)
  783. return tfname
  784. ################################################################################
  785. def is_email_alias(email):
  786. """ checks if the user part of the email is listed in the alias file """
  787. global alias_cache
  788. if alias_cache is None:
  789. aliasfn = which_alias_file()
  790. alias_cache = set()
  791. if aliasfn:
  792. for l in open(aliasfn):
  793. alias_cache.add(l.split(':')[0])
  794. uid = email.split('@')[0]
  795. return uid in alias_cache
  796. ################################################################################
  797. def get_changes_files(from_dir):
  798. """
  799. Takes a directory and lists all .changes files in it (as well as chdir'ing
  800. to the directory; this is due to broken behaviour on the part of p-u/p-a
  801. when you're not in the right place)
  802. Returns a list of filenames
  803. """
  804. try:
  805. # Much of the rest of p-u/p-a depends on being in the right place
  806. os.chdir(from_dir)
  807. changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
  808. except OSError as e:
  809. fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
  810. return changes_files
  811. ################################################################################
# Module-level configuration handle, initialised once at import time and
# used by the helpers below (LDAP settings, mail recipients, ...).
Cnf = config.Config().Cnf
  813. ################################################################################
  814. def parse_wnpp_bug_file(file="/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
  815. """
  816. Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
  817. Well, actually it parsed a local copy, but let's document the source
  818. somewhere ;)
  819. returns a dict associating source package name with a list of open wnpp
  820. bugs (Yes, there might be more than one)
  821. """
  822. line = []
  823. try:
  824. f = open(file)
  825. lines = f.readlines()
  826. except IOError as e:
  827. print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
  828. lines = []
  829. wnpp = {}
  830. for line in lines:
  831. splited_line = line.split(": ", 1)
  832. if len(splited_line) > 1:
  833. wnpp[splited_line[0]] = splited_line[1].split("|")
  834. for source in wnpp.keys():
  835. bugs = []
  836. for wnpp_bug in wnpp[source]:
  837. bug_no = re.search(r"(\d)+", wnpp_bug).group()
  838. if bug_no:
  839. bugs.append(bug_no)
  840. wnpp[source] = bugs
  841. return wnpp
  842. ################################################################################
  843. def deb_extract_control(fh):
  844. """extract DEBIAN/control from a binary package"""
  845. return apt_inst.DebFile(fh).control.extractdata("control")
  846. ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    # Recipient roles come from configuration; default to everybody involved.
    recipients = Cnf.value_list('Dinstall::UploadMailRecipients')
    if not recipients:
        recipients = [
            'maintainer',
            'changed_by',
            'signer',
        ]

    # Ensure signer is last if present, so the duplicate check in the
    # 'signer' branch below sees the other roles' emails first.
    try:
        recipients.remove('signer')
        recipients.append('signer')
    except ValueError:
        pass

    # Compute the set of addresses of the recipients
    addresses = set() # Name + email
    emails = set()    # Email only, used to avoid duplicates
    for recipient in recipients:
        if recipient.startswith('mail:'): # Email hardcoded in config
            address = recipient[5:]
        elif recipient == 'maintainer':
            address = maintainer
        elif recipient == 'changed_by':
            address = changed_by
        elif recipient == 'signer':
            fpr_addresses = gpg_get_key_addresses(fingerprint)
            address = fpr_addresses[0] if fpr_addresses else None
            if any(x in emails for x in fpr_addresses):
                # The signer already gets a copy via another email
                address = None
        else:
            raise Exception('Unsupported entry in {0}: {1}'.format(
                'Dinstall::UploadMailRecipients', recipient))

        if address is not None:
            # fix_maintainer()[3] is taken to be the bare email address;
            # dedupe on it so the same person is only mailed once.
            email = fix_maintainer(address)[3]
            if email not in emails:
                addresses.add(address)
                emails.add(email)

    # fix_maintainer()[1] is taken to be the RFC 2047-encoded form.
    encoded_addresses = [fix_maintainer(e)[1] for e in addresses]
    return encoded_addresses
  898. ################################################################################
  899. def call_editor(text="", suffix=".txt"):
  900. """run editor and return the result as a string
  901. @type text: str
  902. @param text: initial text
  903. @type suffix: str
  904. @param suffix: extension for temporary file
  905. @rtype: str
  906. @return: string with the edited text
  907. """
  908. editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
  909. tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
  910. try:
  911. print(text, end=' ', file=tmp)
  912. tmp.close()
  913. daklib.daksubprocess.check_call([editor, tmp.name])
  914. return open(tmp.name, 'r').read()
  915. finally:
  916. os.unlink(tmp.name)
  917. ################################################################################
def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
    """Report (build-)dependencies that would break if 'removals' were removed.

    Prints a report of broken Depends and Build-Depends (unless quiet is
    set) and returns 1 if any dependency problem was found, 0 otherwise.
    'cruft' only changes the output formatting.

    NOTE(review): 'removals' is assumed to be a collection of package names
    scheduled for removal from 'suite' -- confirm against callers.
    """
    dbsuite = get_suite(suite, session)
    # Overrides may live in a different suite (e.g. for *-proposed-updates).
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}  # binary package name -> component name
    # source -> binary -> set of architectures where it breaks
    all_broken = defaultdict(lambda: defaultdict(set))
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set(x.arch_string for x in get_suite_architectures(suite))
    all_arches -= set(["source", "all"])
    removal_set = set(removals)
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id': dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    if include_arch_all:
        rdep_architectures = all_arches | set(['all'])
    else:
        rdep_architectures = all_arches
    for architecture in rdep_architectures:
        deps = {}
        sources = {}
        virtual_packages = {}
        try:
            params['arch_id'] = get_architecture(architecture, session).arch_id
        except AttributeError:
            # Architecture unknown to the database; skip it.
            continue
        # Fetch every binary in the suite on this architecture together with
        # its source, component and Depends/Provides metadata in one query.
        statement = sql.text('''
            SELECT b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id''')
        query = session.query('package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package. If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package:
                        continue
                    if virtual_pkg not in virtual_packages:
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1
        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
        # Check binary dependencies (Depends)
        for package in deps:
            if package in removals:
                continue
            try:
                parsed_dep = apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print("Error for package %s: %s" % (package, e))
                parsed_dep = []
            for dep in parsed_dep:
                # Check for partial breakage. If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken[source][package].add(architecture)
                    dep_problem = 1
    # Pretty-print the broken Depends, collapsing per-arch noise when a
    # binary breaks everywhere (or on 'all').
    if all_broken and not quiet:
        if cruft:
            print(" - broken Depends:")
        else:
            print("# Broken Depends:")
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print(' %s: %s' % (source, lines[0]))
            else:
                print('%s: %s' % (source, lines[0]))
            for line in lines[1:]:
                if cruft:
                    print(' ' + ' ' * (len(source) + 2) + line)
                else:
                    print(' ' * (len(source) + 2) + line)
        if not cruft:
            print()
    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = defaultdict(set)
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    if include_arch_all:
        metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
    else:
        metakey_ids = (metakey_bd.key_id,)
    params = {
        'suite_id': dbsuite.suite_id,
        'metakey_ids': metakey_ids,
    }
    # Aggregate Build-Depends{,-Indep} per newest source in the suite.
    statement = sql.text('''
        SELECT s.source, string_agg(sm.value, ', ') as build_dep
        FROM source s
        JOIN source_metadata sm ON s.id = sm.src_id
        WHERE s.id in
            (SELECT src FROM newest_src_association
                WHERE suite = :suite_id)
            AND sm.key_id in :metakey_ids
        GROUP BY s.id, s.source''')
    query = session.query('source', 'build_dep').from_statement(statement). \
        params(params)
    for source, build_dep in query:
        if source in removals:
            continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep = apt_pkg.parse_src_depends(build_dep)
            except ValueError as e:
                print("Error for source %s: %s" % (source, e))
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                # Look up the source's component via its 'dsc' override,
                # stripping any /contrib or /non-free suffix first.
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken[key].add(pp_deps(dep))
                dep_problem = 1
    if all_broken and not quiet:
        if cruft:
            print(" - broken Build-Depends:")
        else:
            print("# Broken Build-Depends:")
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print(' %s: %s' % (source, bdeps[0]))
            else:
                print('%s: %s' % (source, bdeps[0]))
            for bdep in bdeps[1:]:
                if cruft:
                    print(' ' + ' ' * (len(source) + 2) + bdep)
                else:
                    print(' ' * (len(source) + 2) + bdep)
        if not cruft:
            print()
    return dep_problem
  1103. ################################################################################
  1104. def parse_built_using(control):
  1105. """source packages referenced via Built-Using
  1106. @type control: dict-like
  1107. @param control: control file to take Built-Using field from
  1108. @rtype: list of (str, str)
  1109. @return: list of (source_name, source_version) pairs
  1110. """
  1111. built_using = control.get('Built-Using', None)
  1112. if built_using is None:
  1113. return []
  1114. bu = []
  1115. for dep in apt_pkg.parse_depends(built_using):
  1116. assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
  1117. source_name, source_version, comp = dep[0]
  1118. assert comp == '=', 'Built-Using must contain strict dependencies'
  1119. bu.append((source_name, source_version))
  1120. return bu
  1121. ################################################################################
  1122. def is_in_debug_section(control):
  1123. """binary package is a debug package
  1124. @type control: dict-like
  1125. @param control: control file of binary package
  1126. @rtype Boolean
  1127. @return: True if the binary package is a debug package
  1128. """
  1129. section = control['Section'].split('/', 1)[-1]
  1130. auto_built_package = control.get("Auto-Built-Package")
  1131. return section == "debug" and auto_built_package == "debug-symbols"
  1132. ################################################################################
  1133. def find_possibly_compressed_file(filename):
  1134. """
  1135. @type filename: string
  1136. @param filename: path to a control file (Sources, Packages, etc) to
  1137. look for
  1138. @rtype string
  1139. @return: path to the (possibly compressed) control file, or null if the
  1140. file doesn't exist
  1141. """
  1142. _compressions = ('', '.xz', '.gz', '.bz2')
  1143. for ext in _compressions:
  1144. _file = filename + ext
  1145. if os.path.exists(_file):
  1146. return _file
  1147. raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename)
  1148. ################################################################################
  1149. def parse_boolean_from_user(value):
  1150. value = value.lower()
  1151. if value in {'yes', 'true', 'enable', 'enabled'}:
  1152. return True
  1153. if value in {'no', 'false', 'disable', 'disabled'}:
  1154. return False
  1155. raise ValueError("Not sure whether %s should be a True or a False" % value)