utils.py 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531
  1. # vim:set et ts=4 sw=4:
  2. """Utility functions
  3. @contact: Debian FTP Master <ftpmaster@debian.org>
  4. @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
  5. @license: GNU General Public License version 2 or later
  6. """
  7. # This program is free software; you can redistribute it and/or modify
  8. # it under the terms of the GNU General Public License as published by
  9. # the Free Software Foundation; either version 2 of the License, or
  10. # (at your option) any later version.
  11. # This program is distributed in the hope that it will be useful,
  12. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. # GNU General Public License for more details.
  15. # You should have received a copy of the GNU General Public License
  16. # along with this program; if not, write to the Free Software
  17. # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  18. import datetime
  19. import email.policy
  20. import errno
  21. import functools
  22. import grp
  23. import os
  24. import pwd
  25. import re
  26. import shutil
  27. import subprocess
  28. import sys
  29. import tempfile
  30. from collections import defaultdict
  31. from collections.abc import Iterable, Mapping, Sequence
  32. from typing import TYPE_CHECKING, Literal, NoReturn, Optional, Union
  33. import apt_inst
  34. import apt_pkg
  35. import sqlalchemy.sql as sql
  36. import daklib.config as config
  37. import daklib.mail
  38. from daklib.dbconn import (
  39. Architecture,
  40. Component,
  41. DBConn,
  42. Override,
  43. OverrideType,
  44. get_active_keyring_paths,
  45. get_architecture,
  46. get_component,
  47. get_or_set_metadatakey,
  48. get_suite,
  49. get_suite_architectures,
  50. )
  51. from .dak_exceptions import (
  52. InvalidDscError,
  53. NoFilesFieldError,
  54. NoFreeFilenameError,
  55. ParseChangesError,
  56. SendmailFailedError,
  57. UnknownFormatError,
  58. )
  59. from .formats import parse_format, validate_changes_format
  60. from .gpg import SignedFile
  61. from .regexes import (
  62. re_build_dep_arch,
  63. re_issource,
  64. re_multi_line_field,
  65. re_parse_maintainer,
  66. re_re_mark,
  67. re_single_line_field,
  68. re_srchasver,
  69. re_whitespace_comment,
  70. )
  71. from .srcformats import get_format_from_string
  72. from .textutils import fix_maintainer
  73. if TYPE_CHECKING:
  74. import daklib.daklog
  75. import daklib.fstransactions
  76. import daklib.upload
  77. ################################################################################
  78. key_uid_email_cache: dict[str, list[str]] = (
  79. {}
  80. ) #: Cache for email addresses from gpg key uids
  81. ################################################################################
  82. def input_or_exit(prompt: Optional[str] = None) -> str:
  83. try:
  84. return input(prompt)
  85. except EOFError:
  86. sys.exit("\nUser interrupt (^D).")
  87. ################################################################################
  88. def extract_component_from_section(section: str) -> tuple[str, str]:
  89. """split "section" into "section", "component" parts
  90. If "component" is not given, "main" is used instead.
  91. :return: tuple (section, component)
  92. """
  93. if section.find("/") != -1:
  94. return section, section.split("/", 1)[0]
  95. return section, "main"
  96. ################################################################################
def parse_deb822(
    armored_contents: bytes, signing_rules: Literal[-1, 0, 1] = 0, keyrings=None
) -> dict[str, str]:
    """Parse a deb822-style control file (possibly PGP-signed) into a dict.

    :param armored_contents: raw bytes of the (possibly armored) control file
    :param signing_rules: -1 = no signature required, 0 = required,
                          1 = additionally enforce dpkg-source's strict layout
    :param keyrings: keyrings for signature verification; :const:`None`
                     disables the signature requirement entirely
    :return: dict mapping lower-cased field names to values; additionally
             contains "filecontents" (the whole input decoded) and, when a
             bracketed version follows Source, a "source-version" entry
    :raises ParseChangesError: on empty input or unparseable lines
    :raises InvalidDscError: when strict checking finds a premature blank line
    """
    require_signature = True
    if keyrings is None:
        keyrings = []
        require_signature = False
    signed_file = SignedFile(
        armored_contents, keyrings=keyrings, require_signature=require_signature
    )
    contents = signed_file.contents.decode("utf-8")

    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines)
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "" and signing_rules == 1:
            # Strict mode: a blank line is only valid at the very end of
            # the data section.
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        if slf := re_single_line_field.match(line):
            # "Field: value" — starts a new field.
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            # " ." is deb822's encoding of a blank line inside a
            # multi-line field value.
            changes[field] += "\n"
            continue
        if mlf := re_multi_line_field.match(line):
            # Continuation line belonging to the most recently seen field.
            if first == -1:
                raise ParseChangesError(
                    "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
                )
            if first == 1 and changes[field] != "":
                changes[field] += "\n"
            first = 0
            changes[field] += mlf.groups()[0] + "\n"
            continue
        # Unrecognised line: accumulate and report as a parse error below.
        error += line

    changes["filecontents"] = armored_contents.decode()

    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        if srcver := re_srchasver.search(changes["source"]):
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
  160. ################################################################################
  161. def parse_changes(
  162. filename: str,
  163. signing_rules: Literal[-1, 0, 1] = 0,
  164. dsc_file: bool = False,
  165. keyrings=None,
  166. ) -> dict[str, str]:
  167. """
  168. Parses a changes or source control (.dsc) file and returns a dictionary
  169. where each field is a key. The mandatory first argument is the
  170. filename of the .changes file.
  171. signing_rules is an optional argument:
  172. - If signing_rules == -1, no signature is required.
  173. - If signing_rules == 0 (the default), a signature is required.
  174. - If signing_rules == 1, it turns on the same strict format checking
  175. as dpkg-source.
  176. The rules for (signing_rules == 1)-mode are:
  177. - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
  178. followed by any PGP header data and must end with a blank line.
  179. - The data section must end with a blank line and must be followed by
  180. "-----BEGIN PGP SIGNATURE-----".
  181. :param dsc_file: `filename` is a Debian source control (.dsc) file
  182. """
  183. with open(filename, "rb") as changes_in:
  184. content = changes_in.read()
  185. changes = parse_deb822(content, signing_rules, keyrings=keyrings)
  186. if not dsc_file:
  187. # Finally ensure that everything needed for .changes is there
  188. must_keywords = (
  189. "Format",
  190. "Date",
  191. "Source",
  192. "Architecture",
  193. "Version",
  194. "Distribution",
  195. "Maintainer",
  196. "Changes",
  197. "Files",
  198. )
  199. missingfields = []
  200. for keyword in must_keywords:
  201. if keyword.lower() not in changes:
  202. missingfields.append(keyword)
  203. if len(missingfields):
  204. raise ParseChangesError(
  205. "Missing mandatory field(s) in changes file (policy 5.5): %s"
  206. % (missingfields)
  207. )
  208. return changes
  209. ################################################################################
def check_dsc_files(
    dsc_filename: str,
    dsc: Mapping[str, str],
    dsc_files: Mapping[str, Mapping[str, str]],
) -> list[str]:
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    :param dsc_filename: path of .dsc file
    :param dsc: the content of the .dsc parsed by :func:`parse_changes`
    :param dsc_files: the file list returned by :func:`build_file_list`
    :return: all errors detected
    """
    rejmsg = []

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has: defaultdict[str, int] = defaultdict(lambda: 0)

    # Maps a filename-suffix regex to the counter keys it increments.
    # Order matters: the first matching pattern wins, so the more specific
    # patterns (e.g. orig.tar.gz) must come before the generic ones (tar.gz).
    ftype_lookup = (
        (r"orig\.tar\.(gz|bz2|xz)\.asc", ("orig_tar_sig",)),
        (r"orig\.tar\.gz", ("orig_tar_gz", "orig_tar")),
        (r"diff\.gz", ("debian_diff",)),
        (r"tar\.gz", ("native_tar_gz", "native_tar")),
        (r"debian\.tar\.(gz|bz2|xz)", ("debian_tar",)),
        (r"orig\.tar\.(gz|bz2|xz)", ("orig_tar",)),
        (r"tar\.(gz|bz2|xz)", ("native_tar",)),
        (r"orig-.+\.tar\.(gz|bz2|xz)\.asc", ("more_orig_tar_sig",)),
        (r"orig-.+\.tar\.(gz|bz2|xz)", ("more_orig_tar",)),
    )
    for f in dsc_files:
        m = re_issource.match(f)
        if not m:
            rejmsg.append(
                "%s: %s in Files field not recognised as source." % (dsc_filename, f)
            )
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
            # NOTE(review): this `break` stops after the first unexpected
            # file, so further unexpected files go unreported — a `continue`
            # would list them all; confirm whether that is intentional.
            break

    # Check for multiple files
    for file_type in (
        "orig_tar",
        "orig_tar_sig",
        "native_tar",
        "debian_tar",
        "debian_diff",
    ):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc["format"])
        rejmsg.extend(["%s: %s" % (dsc_filename, x) for x in format.reject_msgs(has)])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg
  275. ################################################################################
  276. # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(
    changes: Mapping[str, str], is_a_dsc: bool = False, field="files", hashname="md5sum"
) -> dict[str, dict[str, str]]:
    """Parse a Files/Checksums field of a .changes or .dsc into a dict.

    :param changes: parsed control data as returned by :func:`parse_changes`
    :param is_a_dsc: input is a .dsc, which has no section/priority columns
    :param field: field to parse, e.g. "files" or "checksums-sha256"
    :param hashname: key under which each entry's hash value is stored
    :return: dict mapping file name to a dict with size, section, priority,
             component and the hash
    :raises NoFilesFieldError: when `field` is missing from `changes`
    :raises ParseChangesError: when a line has the wrong number of columns
    """
    files = {}

    # Make sure we have a Files: field to parse...
    if field not in changes:
        raise NoFilesFieldError

    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes["format"]), field)

    # Only the "files" field of a .changes carries section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split("\n"):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        # Normalise empty section/priority to the conventional "-".
        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = dict(
            size=size, section=section, priority=priority, component=component
        )
        files[name][hashname] = md5

    return files
  311. ################################################################################
def send_mail(message: str, whitelists: Optional[list[str]] = None) -> None:
    """sendmail wrapper, takes a message string

    :param whitelists: path to whitelists. :const:`None` or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.
    """
    msg = daklib.mail.parse_mail(message)

    # The incoming message might be UTF-8, but outgoing mail should
    # use a legacy-compatible encoding. Set the content to the
    # text to make sure this is the case.
    # Note that this does not work with multipart messages.
    msg.set_content(msg.get_payload(), cte="quoted-printable")

    # Check whether we're supposed to be sending mail
    call_sendmail = True
    if "Dinstall::Options::No-Mail" in Cnf and Cnf["Dinstall::Options::No-Mail"]:
        call_sendmail = False

    # Merge caller-supplied whitelists with the global one from config.
    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get("Dinstall::MailWhiteList", ""):
        whitelists.append(Cnf["Dinstall::MailWhiteList"])
    if len(whitelists) != 0:
        # Compile the whitelist: each non-comment line is either a regex
        # (flagged by re_re_mark) or a literal address to escape.
        whitelist = []
        for path in whitelists:
            with open(path, "r") as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(
                                re.compile(re_re_mark.sub("", line.strip(), 1))
                            )
                        else:
                            whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = msg.get(field, None)
            if value is not None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, mail) = fix_maintainer(
                        item.strip()
                    )
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(mail):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print("Skipping {0} since it's not whitelisted".format(item))
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del msg[field]
                else:
                    msg.replace_header(field, ", ".join(match))

        # Change message fields in order if we don't have a To header
        if "To" not in msg:
            fields.reverse()
            for field in fields:
                if field in msg:
                    # Promote the first surviving recipient header to To.
                    msg[fields[-1]] = msg[field]
                    del msg[field]
                    break
            else:
                # return, as we removed all recipients.
                call_sendmail = False

    # sign mail
    if mailkey := Cnf.get("Dinstall::Mail-Signature-Key", ""):
        kwargs = {
            "keyids": [mailkey],
            "pubring": Cnf.get("Dinstall::SigningPubKeyring") or None,
            "secring": Cnf.get("Dinstall::SigningKeyring") or None,
            "homedir": Cnf.get("Dinstall::SigningHomedir") or None,
            "passphrase_file": Cnf.get("Dinstall::SigningPassphraseFile") or None,
        }
        msg = daklib.mail.sign_mail(msg, **kwargs)

    msg_bytes = msg.as_bytes(policy=email.policy.default)

    # Archive a copy of the outgoing mail under Dir::Mail, if configured.
    maildir = Cnf.get("Dir::Mail")
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, "wb") as fh:
            fh.write(msg_bytes)

    # Invoke sendmail
    if not call_sendmail:
        return
    try:
        subprocess.run(
            Cnf["Dinstall::SendmailCommand"].split(),
            input=msg_bytes,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
    except subprocess.CalledProcessError as e:
        raise SendmailFailedError(e.output.decode().rstrip())
  412. ################################################################################
  413. def poolify(source: str) -> str:
  414. """convert `source` name into directory path used in pool structure"""
  415. if source[:3] == "lib":
  416. return source[:4] + "/" + source + "/"
  417. else:
  418. return source[:1] + "/" + source + "/"
  419. ################################################################################
def move(src: str, dest: str, overwrite: bool = False, perms: int = 0o664) -> None:
    """Move `src` to `dest`, creating the destination directory if needed.

    :param overwrite: allow replacing an existing (writable) destination file
    :param perms: mode applied to the destination file after the copy
    """
    # If dest is an existing directory, the file keeps its basename there.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.lexists(dest_dir):
        # Create with group-writable, setgid directories regardless of umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    # print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += "/" + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar(
                    "Can't move %s to %s - can't write to existing file." % (src, dest)
                )
    # copy2 + unlink rather than os.rename so moves work across filesystems.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
  444. ################################################################################
  445. def TemplateSubst(subst_map: Mapping[str, str], filename: str) -> str:
  446. """Perform a substition of template"""
  447. with open(filename) as templatefile:
  448. template = templatefile.read()
  449. for k, v in subst_map.items():
  450. template = template.replace(k, str(v))
  451. return template
  452. ################################################################################
  453. def fubar(msg: str, exit_code: int = 1) -> NoReturn:
  454. """print error message and exit program"""
  455. print("E:", msg, file=sys.stderr)
  456. sys.exit(exit_code)
  457. def warn(msg: str) -> None:
  458. """print warning message"""
  459. print("W:", msg, file=sys.stderr)
  460. ################################################################################
  461. def whoami() -> str:
  462. """get user name
  463. Returns the user name with a laughable attempt at rfc822 conformancy
  464. (read: removing stray periods).
  465. """
  466. return pwd.getpwuid(os.getuid())[4].split(",")[0].replace(".", "")
  467. def getusername() -> str:
  468. """get login name"""
  469. return pwd.getpwuid(os.getuid())[0]
  470. ################################################################################
  471. def size_type(c: Union[int, float]) -> str:
  472. t = " B"
  473. if c > 10240:
  474. c = c / 1024
  475. t = " KB"
  476. if c > 10240:
  477. c = c / 1024
  478. t = " MB"
  479. return "%d%s" % (c, t)
  480. ################################################################################
  481. def find_next_free(dest: str, too_many: int = 100) -> str:
  482. extra = 0
  483. orig_dest = dest
  484. while os.path.lexists(dest) and extra < too_many:
  485. dest = orig_dest + "." + repr(extra)
  486. extra += 1
  487. if extra >= too_many:
  488. raise NoFreeFilenameError
  489. return dest
  490. ################################################################################
  491. def result_join(original: Iterable[Optional[str]], sep: str = "\t") -> str:
  492. return sep.join(x if x is not None else "" for x in original)
  493. ################################################################################
  494. def prefix_multi_line_string(
  495. lines: str, prefix: str, include_blank_lines: bool = False
  496. ) -> str:
  497. """prepend `prefix` to each line in `lines`"""
  498. return "\n".join(
  499. prefix + cleaned_line
  500. for line in lines.split("\n")
  501. if (cleaned_line := line.strip()) or include_blank_lines
  502. )
  503. ################################################################################
  504. def join_with_commas_and(list: Sequence[str]) -> str:
  505. if len(list) == 0:
  506. return "nothing"
  507. if len(list) == 1:
  508. return list[0]
  509. return ", ".join(list[:-1]) + " and " + list[-1]
  510. ################################################################################
  511. def pp_deps(deps: Iterable[tuple[str, str, str]]) -> str:
  512. pp_deps = (
  513. f"{pkg} ({constraint} {version})" if constraint else pkg
  514. for pkg, constraint, version in deps
  515. )
  516. return " |".join(pp_deps)
  517. ################################################################################
def get_conf():
    """Return the module-global configuration object `Cnf`."""
    return Cnf
  520. ################################################################################
def parse_args(Options) -> tuple[str, str, str, bool]:
    """Handle -a, -c and -s arguments; returns them as SQL constraints

    :param Options: mapping with "Suite", "Component" and "Architecture"
                    entries, each a comma/space separated list of names
    :return: tuple (con_suites, con_architectures, con_components,
             check_source) — the con_* strings are "AND ..." SQL fragments
             (empty when the option was not given); check_source is True
             when "source" was listed as an architecture
    """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn(
                    "suite '%s' not recognised."
                    % (suite and suite.suite_name or suitename)
                )
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            # Only integer IDs from the database are interpolated here.
            con_suites = "AND su.id IN (%s)" % ", ".join(
                [str(i) for i in suite_ids_list]
            )
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join(
                [str(i) for i in component_ids_list]
            )
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = False
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            # "source" is not a real architecture; track it separately.
            if archname == "source":
                check_source = True
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join(
                [str(i) for i in arch_ids_list]
            )
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        # No -a given: include source by default.
        check_source = True

    return (con_suites, con_architectures, con_components, check_source)
  588. ################################################################################
  589. @functools.total_ordering
  590. class ArchKey:
  591. """
  592. Key object for use in sorting lists of architectures.
  593. Sorts normally except that 'source' dominates all others.
  594. """
  595. __slots__ = ["arch", "issource"]
  596. def __init__(self, arch, *args):
  597. self.arch = arch
  598. self.issource = arch == "source"
  599. def __lt__(self, other: "ArchKey") -> bool:
  600. if self.issource:
  601. return not other.issource
  602. if other.issource:
  603. return False
  604. return self.arch < other.arch
  605. def __eq__(self, other: object) -> bool:
  606. if not isinstance(other, ArchKey):
  607. return NotImplemented
  608. return self.arch == other.arch
  609. ################################################################################
  610. def split_args(s: str, dwim: bool = True) -> list[str]:
  611. """
  612. Split command line arguments which can be separated by either commas
  613. or whitespace. If dwim is set, it will complain about string ending
  614. in comma since this usually means someone did 'dak ls -a i386, m68k
  615. foo' or something and the inevitable confusion resulting from 'm68k'
  616. being treated as an argument is undesirable.
  617. """
  618. if s.find(",") == -1:
  619. return s.split()
  620. else:
  621. if s[-1:] == "," and dwim:
  622. fubar("split_args: found trailing comma, spurious space maybe?")
  623. return s.split(",")
  624. ################################################################################
  625. def gpg_keyring_args(keyrings: Optional[Iterable[str]] = None) -> list[str]:
  626. if keyrings is None:
  627. keyrings = get_active_keyring_paths()
  628. return ["--keyring={}".format(path) for path in keyrings]
  629. ################################################################################
  630. def _gpg_get_addresses_from_listing(output: bytes) -> list[str]:
  631. addresses: list[str] = []
  632. for line in output.split(b"\n"):
  633. parts = line.split(b":")
  634. if parts[0] not in (b"uid", b"pub"):
  635. continue
  636. if parts[1] in (b"i", b"d", b"r"):
  637. # Skip uid that is invalid, disabled or revoked
  638. continue
  639. try:
  640. uid_bytes = parts[9]
  641. except IndexError:
  642. continue
  643. try:
  644. uid = uid_bytes.decode(encoding="utf-8")
  645. except UnicodeDecodeError:
  646. # If the uid is not valid UTF-8, we assume it is an old uid
  647. # still encoding in Latin-1.
  648. uid = uid_bytes.decode(encoding="latin1")
  649. m = re_parse_maintainer.match(uid)
  650. if not m:
  651. continue
  652. address = m.group(2)
  653. if address.endswith("@debian.org"):
  654. # prefer @debian.org addresses
  655. # TODO: maybe not hardcode the domain
  656. addresses.insert(0, address)
  657. else:
  658. addresses.append(address)
  659. return addresses
  660. def gpg_get_key_addresses(fingerprint: str) -> list[str]:
  661. """retreive email addresses from gpg key uids for a given fingerprint"""
  662. addresses = key_uid_email_cache.get(fingerprint)
  663. if addresses is not None:
  664. return addresses
  665. try:
  666. cmd = ["gpg", "--no-default-keyring"]
  667. cmd.extend(gpg_keyring_args())
  668. cmd.extend(["--with-colons", "--list-keys", "--", fingerprint])
  669. output = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
  670. except subprocess.CalledProcessError:
  671. addresses = []
  672. else:
  673. addresses = _gpg_get_addresses_from_listing(output)
  674. key_uid_email_cache[fingerprint] = addresses
  675. return addresses
  676. ################################################################################
def open_ldap_connection():
    """open connection to the configured LDAP server

    Uses Import-LDAP-Fingerprints::LDAPServer; when a CA certificate file
    is configured, the connection is upgraded via STARTTLS with mandatory
    certificate verification. Binds anonymously.
    """
    import ldap  # type: ignore

    LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
    ca_cert_file = Cnf.get("Import-LDAP-Fingerprints::CACertFile")

    conn = ldap.initialize(LDAPServer)

    if ca_cert_file:
        # Require a valid server certificate signed by the configured CA.
        conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        conn.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
        conn.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        conn.start_tls_s()

    # Anonymous simple bind.
    conn.simple_bind_s("", "")
    return conn
  690. ################################################################################
  691. def get_logins_from_ldap(fingerprint: str = "*") -> dict[str, str]:
  692. """retrieve login from LDAP linked to a given fingerprint"""
  693. import ldap
  694. conn = open_ldap_connection()
  695. LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
  696. Attrs = conn.search_s(
  697. LDAPDn,
  698. ldap.SCOPE_ONELEVEL,
  699. "(keyfingerprint=%s)" % fingerprint,
  700. ["uid", "keyfingerprint"],
  701. )
  702. login: dict[str, str] = {}
  703. for elem in Attrs:
  704. fpr = elem[1]["keyFingerPrint"][0].decode()
  705. uid = elem[1]["uid"][0].decode()
  706. login[fpr] = uid
  707. return login
  708. ################################################################################
  709. def get_users_from_ldap() -> dict[str, str]:
  710. """retrieve login and user names from LDAP"""
  711. import ldap
  712. conn = open_ldap_connection()
  713. LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
  714. Attrs = conn.search_s(
  715. LDAPDn, ldap.SCOPE_ONELEVEL, "(uid=*)", ["uid", "cn", "mn", "sn"]
  716. )
  717. users: dict[str, str] = {}
  718. for elem in Attrs:
  719. elem = elem[1]
  720. name = []
  721. for k in ("cn", "mn", "sn"):
  722. try:
  723. value = elem[k][0].decode()
  724. if value and value[0] != "-":
  725. name.append(value)
  726. except KeyError:
  727. pass
  728. users[" ".join(name)] = elem["uid"][0]
  729. return users
  730. ################################################################################
  731. def clean_symlink(src: str, dest: str, root: str) -> str:
  732. """
  733. Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
  734. Returns fixed 'src'
  735. """
  736. src = src.replace(root, "", 1)
  737. dest = dest.replace(root, "", 1)
  738. dest = os.path.dirname(dest)
  739. new_src = "../" * len(dest.split("/"))
  740. return new_src + src
  741. ################################################################################
  742. def temp_dirname(
  743. parent: Optional[str] = None,
  744. prefix: str = "dak",
  745. suffix: str = "",
  746. mode: Optional[int] = None,
  747. group: Optional[str] = None,
  748. ) -> str:
  749. """
  750. Return a secure and unique directory by pre-creating it.
  751. :param parent: If non-null it will be the directory the directory is pre-created in.
  752. :param prefix: The filename will be prefixed with this string
  753. :param suffix: The filename will end with this string
  754. :param mode: If set the file will get chmodded to those permissions
  755. :param group: If set the file will get chgrped to the specified group.
  756. :return: Returns a pair (fd, name)
  757. """
  758. tfname = tempfile.mkdtemp(suffix, prefix, parent)
  759. if mode is not None:
  760. os.chmod(tfname, mode)
  761. if group is not None:
  762. gid = grp.getgrnam(group).gr_gid
  763. os.chown(tfname, -1, gid)
  764. return tfname
  765. ################################################################################
  766. def get_changes_files(from_dir: str) -> list[str]:
  767. """
  768. Takes a directory and lists all .changes files in it (as well as chdir'ing
  769. to the directory; this is due to broken behaviour on the part of p-u/p-a
  770. when you're not in the right place)
  771. Returns a list of filenames
  772. """
  773. try:
  774. # Much of the rest of p-u/p-a depends on being in the right place
  775. os.chdir(from_dir)
  776. changes_files = [x for x in os.listdir(from_dir) if x.endswith(".changes")]
  777. except OSError as e:
  778. fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
  779. return changes_files
  780. ################################################################################
# Shared configuration object, created once at import time and used by the
# helpers below (e.g. via Cnf.value_list, Cnf.find, Cnf.get).
Cnf = config.Config().Cnf
  782. ################################################################################
  783. def parse_wnpp_bug_file(
  784. file: str = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm",
  785. ) -> dict[str, list[str]]:
  786. """
  787. Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
  788. Well, actually it parsed a local copy, but let's document the source
  789. somewhere ;)
  790. returns a dict associating source package name with a list of open wnpp
  791. bugs (Yes, there might be more than one)
  792. """
  793. try:
  794. with open(file) as f:
  795. lines = f.readlines()
  796. except OSError:
  797. print(
  798. "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any."
  799. % file
  800. )
  801. lines = []
  802. wnpp = {}
  803. for line in lines:
  804. splited_line = line.split(": ", 1)
  805. if len(splited_line) > 1:
  806. wnpp[splited_line[0]] = splited_line[1].split("|")
  807. for source in wnpp:
  808. bugs = []
  809. for wnpp_bug in wnpp[source]:
  810. bug_no = re.search(r"(\d)+", wnpp_bug).group()
  811. if bug_no:
  812. bugs.append(bug_no)
  813. wnpp[source] = bugs
  814. return wnpp
  815. ################################################################################
  816. def deb_extract_control(path: str) -> bytes:
  817. """extract DEBIAN/control from a binary package"""
  818. return apt_inst.DebFile(path).control.extractdata("control")
  819. ################################################################################
def mail_addresses_for_upload(
    maintainer: str,
    changed_by: str,
    fingerprint: str,
    authorized_by_fingerprint: Optional[str],
) -> list[str]:
    """mail addresses to contact for an upload

    :param maintainer: Maintainer field of the .changes file
    :param changed_by: Changed-By field of the .changes file
    :param fingerprint: fingerprint of the key used to sign the upload
    :param authorized_by_fingerprint: fingerprint used for the
        "authorized_by" recipient; if None/empty, that recipient is skipped
    :return: list of RFC 2047-encoded mail addresses to contact regarding
        this upload
    """
    recipients = Cnf.value_list("Dinstall::UploadMailRecipients")
    if not recipients:
        # Default recipient set when the config does not specify one.
        recipients = [
            "maintainer",
            "changed_by",
            "signer",
            "authorized_by",
        ]

    # Ensure signer and authorized_by are last if present: remove() raises
    # ValueError when absent, in which case the append in the else-branch
    # is skipped too.
    for r in ("signer", "authorized_by"):
        try:
            recipients.remove(r)
        except ValueError:
            pass
        else:
            recipients.append(r)

    # Compute the set of addresses of the recipients
    addresses = set()  # Name + email
    emails = set()  # Email only, used to avoid duplicates
    for recipient in recipients:
        if recipient.startswith("mail:"):  # Email hardcoded in config
            address = recipient[5:]
        elif recipient == "maintainer":
            address = maintainer
        elif recipient == "changed_by":
            address = changed_by
        elif recipient == "signer" or recipient == "authorized_by":
            fpr = fingerprint if recipient == "signer" else authorized_by_fingerprint
            if not fpr:
                continue
            fpr_addresses = gpg_get_key_addresses(fpr)
            address = fpr_addresses[0] if fpr_addresses else None
            if any(x in emails for x in fpr_addresses):
                # The signer already gets a copy via another email
                address = None
        else:
            raise Exception(
                "Unsupported entry in {0}: {1}".format(
                    "Dinstall::UploadMailRecipients", recipient
                )
            )
        if address is not None:
            # Deduplicate on the bare email (fix_maintainer()[3]) while
            # keeping the original "Name <email>" form for encoding below.
            mail = fix_maintainer(address)[3]
            if mail not in emails:
                addresses.add(address)
                emails.add(mail)
    encoded_addresses = [fix_maintainer(e)[1] for e in addresses]
    return encoded_addresses
  881. ################################################################################
  882. def call_editor_for_file(path: str) -> None:
  883. editor = os.environ.get("VISUAL", os.environ.get("EDITOR", "sensible-editor"))
  884. subprocess.check_call([editor, path])
  885. ################################################################################
  886. def call_editor(text: str = "", suffix: str = ".txt") -> str:
  887. """run editor and return the result as a string
  888. :param text: initial text
  889. :param suffix: extension for temporary file
  890. :return: string with the edited text
  891. """
  892. with tempfile.NamedTemporaryFile(mode="w+t", suffix=suffix) as fh:
  893. print(text, end="", file=fh)
  894. fh.flush()
  895. call_editor_for_file(fh.name)
  896. fh.seek(0)
  897. return fh.read()
  898. ################################################################################
  899. def check_reverse_depends(
  900. removals: Iterable[str],
  901. suite: str,
  902. arches: Optional[Iterable[Architecture]] = None,
  903. session=None,
  904. cruft: bool = False,
  905. quiet: bool = False,
  906. include_arch_all: bool = True,
  907. ) -> bool:
  908. dbsuite = get_suite(suite, session)
  909. overridesuite = dbsuite
  910. if dbsuite.overridesuite is not None:
  911. overridesuite = get_suite(dbsuite.overridesuite, session)
  912. dep_problem = False
  913. p2c = {}
  914. all_broken = defaultdict(lambda: defaultdict(set))
  915. if arches:
  916. all_arches = set(arches)
  917. else:
  918. all_arches = set(x.arch_string for x in get_suite_architectures(suite))
  919. all_arches -= set(["source", "all"])
  920. removal_set = set(removals)
  921. metakey_d = get_or_set_metadatakey("Depends", session)
  922. metakey_p = get_or_set_metadatakey("Provides", session)
  923. params = {
  924. "suite_id": dbsuite.suite_id,
  925. "metakey_d_id": metakey_d.key_id,
  926. "metakey_p_id": metakey_p.key_id,
  927. }
  928. if include_arch_all:
  929. rdep_architectures = all_arches | set(["all"])
  930. else:
  931. rdep_architectures = all_arches
  932. for architecture in rdep_architectures:
  933. deps = {}
  934. sources = {}
  935. virtual_packages = {}
  936. try:
  937. params["arch_id"] = get_architecture(architecture, session).arch_id
  938. except AttributeError:
  939. continue
  940. statement = sql.text(
  941. """
  942. SELECT b.package, s.source, c.name as component,
  943. (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
  944. (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
  945. FROM binaries b
  946. JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
  947. JOIN source s ON b.source = s.id
  948. JOIN files_archive_map af ON b.file = af.file_id
  949. JOIN component c ON af.component_id = c.id
  950. WHERE b.architecture = :arch_id"""
  951. )
  952. query = (
  953. session.query(
  954. sql.column("package"),
  955. sql.column("source"),
  956. sql.column("component"),
  957. sql.column("depends"),
  958. sql.column("provides"),
  959. )
  960. .from_statement(statement)
  961. .params(params)
  962. )
  963. for package, source, component, depends, provides in query:
  964. sources[package] = source
  965. p2c[package] = component
  966. if depends is not None:
  967. deps[package] = depends
  968. # Maintain a counter for each virtual package. If a
  969. # Provides: exists, set the counter to 0 and count all
  970. # provides by a package not in the list for removal.
  971. # If the counter stays 0 at the end, we know that only
  972. # the to-be-removed packages provided this virtual
  973. # package.
  974. if provides is not None:
  975. for virtual_pkg in provides.split(","):
  976. virtual_pkg = virtual_pkg.strip()
  977. if virtual_pkg == package:
  978. continue
  979. if virtual_pkg not in virtual_packages:
  980. virtual_packages[virtual_pkg] = 0
  981. if package not in removals:
  982. virtual_packages[virtual_pkg] += 1
  983. # If a virtual package is only provided by the to-be-removed
  984. # packages, treat the virtual package as to-be-removed too.
  985. removal_set.update(
  986. virtual_pkg
  987. for virtual_pkg in virtual_packages
  988. if not virtual_packages[virtual_pkg]
  989. )
  990. # Check binary dependencies (Depends)
  991. for package in deps:
  992. if package in removals:
  993. continue
  994. try:
  995. parsed_dep = apt_pkg.parse_depends(deps[package])
  996. except ValueError as e:
  997. print("Error for package %s: %s" % (package, e))
  998. parsed_dep = []
  999. for dep in parsed_dep:
  1000. # Check for partial breakage. If a package has a ORed
  1001. # dependency, there is only a dependency problem if all
  1002. # packages in the ORed depends will be removed.
  1003. unsat = 0
  1004. for dep_package, _, _ in dep:
  1005. if dep_package in removals:
  1006. unsat += 1
  1007. if unsat == len(dep):
  1008. component = p2c[package]
  1009. source = sources[package]
  1010. if component != "main":
  1011. source = "%s/%s" % (source, component)
  1012. all_broken[source][package].add(architecture)
  1013. dep_problem = True
  1014. if all_broken and not quiet:
  1015. if cruft:
  1016. print(" - broken Depends:")
  1017. else:
  1018. print("# Broken Depends:")
  1019. for source, bindict in sorted(all_broken.items()):
  1020. lines = []
  1021. for binary, arches in sorted(bindict.items()):
  1022. if arches == all_arches or "all" in arches:
  1023. lines.append(binary)
  1024. else:
  1025. lines.append("%s [%s]" % (binary, " ".join(sorted(arches))))
  1026. if cruft:
  1027. print(" %s: %s" % (source, lines[0]))
  1028. else:
  1029. print("%s: %s" % (source, lines[0]))
  1030. for line in lines[1:]:
  1031. if cruft:
  1032. print(" " + " " * (len(source) + 2) + line)
  1033. else:
  1034. print(" " * (len(source) + 2) + line)
  1035. if not cruft:
  1036. print()
  1037. # Check source dependencies (Build-Depends and Build-Depends-Indep)
  1038. all_broken = defaultdict(set)
  1039. metakey_bd = get_or_set_metadatakey("Build-Depends", session)
  1040. metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
  1041. if include_arch_all:
  1042. metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
  1043. else:
  1044. metakey_ids = (metakey_bd.key_id,)
  1045. params = {
  1046. "suite_id": dbsuite.suite_id,
  1047. "metakey_ids": metakey_ids,
  1048. }
  1049. statement = sql.text(
  1050. """
  1051. SELECT s.source, string_agg(sm.value, ', ') as build_dep
  1052. FROM source s
  1053. JOIN source_metadata sm ON s.id = sm.src_id
  1054. WHERE s.id in
  1055. (SELECT src FROM newest_src_association
  1056. WHERE suite = :suite_id)
  1057. AND sm.key_id in :metakey_ids
  1058. GROUP BY s.id, s.source"""
  1059. )
  1060. query = (
  1061. session.query(sql.column("source"), sql.column("build_dep"))
  1062. .from_statement(statement)
  1063. .params(params)
  1064. )
  1065. for source, build_dep in query:
  1066. if source in removals:
  1067. continue
  1068. parsed_dep = []
  1069. if build_dep is not None:
  1070. # Remove [arch] information since we want to see breakage on all arches
  1071. build_dep = re_build_dep_arch.sub("", build_dep)
  1072. try:
  1073. parsed_dep = apt_pkg.parse_src_depends(build_dep)
  1074. except ValueError as e:
  1075. print("Error for source %s: %s" % (source, e))
  1076. for dep in parsed_dep:
  1077. unsat = 0
  1078. for dep_package, _, _ in dep:
  1079. if dep_package in removals:
  1080. unsat += 1
  1081. if unsat == len(dep):
  1082. (component,) = (
  1083. session.query(Component.component_name)
  1084. .join(Component.overrides)
  1085. .filter(Override.suite == overridesuite)
  1086. .filter(
  1087. Override.package
  1088. == re.sub("/(contrib|non-free-firmware|non-free)$", "", source)
  1089. )
  1090. .join(Override.overridetype)
  1091. .filter(OverrideType.overridetype == "dsc")
  1092. .first()
  1093. )
  1094. key = source
  1095. if component != "main":
  1096. key = "%s/%s" % (source, component)
  1097. all_broken[key].add(pp_deps(dep))
  1098. dep_problem = True
  1099. if all_broken and not quiet:
  1100. if cruft:
  1101. print(" - broken Build-Depends:")
  1102. else:
  1103. print("# Broken Build-Depends:")
  1104. for source, bdeps in sorted(all_broken.items()):
  1105. bdeps = sorted(bdeps)
  1106. if cruft:
  1107. print(" %s: %s" % (source, bdeps[0]))
  1108. else:
  1109. print("%s: %s" % (source, bdeps[0]))
  1110. for bdep in bdeps[1:]:
  1111. if cruft:
  1112. print(" " + " " * (len(source) + 2) + bdep)
  1113. else:
  1114. print(" " * (len(source) + 2) + bdep)
  1115. if not cruft:
  1116. print()
  1117. return dep_problem
  1118. ################################################################################
  1119. def parse_built_using(control: Mapping[str, str]) -> list[tuple[str, str]]:
  1120. """source packages referenced via Built-Using
  1121. :param control: control file to take Built-Using field from
  1122. :return: list of (source_name, source_version) pairs
  1123. """
  1124. built_using = control.get("Built-Using", None)
  1125. if built_using is None:
  1126. return []
  1127. bu = []
  1128. for dep in apt_pkg.parse_depends(built_using):
  1129. assert len(dep) == 1, "Alternatives are not allowed in Built-Using field"
  1130. source_name, source_version, comp = dep[0]
  1131. assert comp == "=", "Built-Using must contain strict dependencies"
  1132. bu.append((source_name, source_version))
  1133. return bu
  1134. ################################################################################
  1135. def is_in_debug_section(control: Mapping[str, str]) -> bool:
  1136. """binary package is a debug package
  1137. :param control: control file of binary package
  1138. :return: True if the binary package is a debug package
  1139. """
  1140. section = control["Section"].split("/", 1)[-1]
  1141. auto_built_package = control.get("Auto-Built-Package")
  1142. return section == "debug" and auto_built_package == "debug-symbols"
  1143. ################################################################################
  1144. def find_possibly_compressed_file(filename: str) -> str:
  1145. """
  1146. :param filename: path to a control file (Sources, Packages, etc) to
  1147. look for
  1148. :return: path to the (possibly compressed) control file, or null if the
  1149. file doesn't exist
  1150. """
  1151. _compressions = ("", ".xz", ".gz", ".bz2")
  1152. for ext in _compressions:
  1153. _file = filename + ext
  1154. if os.path.exists(_file):
  1155. return _file
  1156. raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), filename)
  1157. ################################################################################
  1158. def parse_boolean_from_user(value: str) -> bool:
  1159. value = value.lower()
  1160. if value in {"yes", "true", "enable", "enabled"}:
  1161. return True
  1162. if value in {"no", "false", "disable", "disabled"}:
  1163. return False
  1164. raise ValueError("Not sure whether %s should be a True or a False" % value)
  1165. def suite_suffix(suite_name: str) -> str:
  1166. """Return suite_suffix for the given suite"""
  1167. suffix = Cnf.find("Dinstall::SuiteSuffix", "")
  1168. if suffix == "":
  1169. return ""
  1170. elif "Dinstall::SuiteSuffixSuites" not in Cnf:
  1171. # TODO: warn (once per run) that SuiteSuffix will be deprecated in the future
  1172. return suffix
  1173. elif suite_name in Cnf.value_list("Dinstall::SuiteSuffixSuites"):
  1174. return suffix
  1175. return ""
  1176. ################################################################################
  1177. def process_buildinfos(
  1178. directory: str,
  1179. buildinfo_files: "Iterable[daklib.upload.HashedFile]",
  1180. fs_transaction: "daklib.fstransactions.FilesystemTransaction",
  1181. logger: "daklib.daklog.Logger",
  1182. ) -> None:
  1183. """Copy buildinfo files into Dir::BuildinfoArchive
  1184. :param directory: directory where .changes is stored
  1185. :param buildinfo_files: names of buildinfo files
  1186. :param fs_transaction: FilesystemTransaction instance
  1187. :param logger: logger instance
  1188. """
  1189. if "Dir::BuildinfoArchive" not in Cnf:
  1190. return
  1191. target_dir = os.path.join(
  1192. Cnf["Dir::BuildinfoArchive"],
  1193. datetime.datetime.now().strftime("%Y/%m/%d"),
  1194. )
  1195. for f in buildinfo_files:
  1196. src = os.path.join(directory, f.filename)
  1197. dst = find_next_free(os.path.join(target_dir, f.filename))
  1198. logger.log(["Archiving", f.filename])
  1199. fs_transaction.copy(src, dst, mode=0o644)
  1200. ################################################################################
  1201. def move_to_morgue(
  1202. morguesubdir: str,
  1203. filenames: Iterable[str],
  1204. fs_transaction: "daklib.fstransactions.FilesystemTransaction",
  1205. logger: "daklib.daklog.Logger",
  1206. ):
  1207. """Move a file to the correct dir in morgue
  1208. :param morguesubdir: subdirectory of morgue where this file needs to go
  1209. :param filenames: names of files
  1210. :param fs_transaction: FilesystemTransaction instance
  1211. :param logger: logger instance
  1212. """
  1213. morguedir = Cnf.get("Dir::Morgue", os.path.join(Cnf.get("Dir::Base"), "morgue"))
  1214. # Build directory as morguedir/morguesubdir/year/month/day
  1215. now = datetime.datetime.now()
  1216. dest = os.path.join(
  1217. morguedir, morguesubdir, str(now.year), "%.2d" % now.month, "%.2d" % now.day
  1218. )
  1219. for filename in filenames:
  1220. dest_filename = dest + "/" + os.path.basename(filename)
  1221. # If the destination file exists; try to find another filename to use
  1222. if os.path.lexists(dest_filename):
  1223. dest_filename = find_next_free(dest_filename)
  1224. logger.log(["move to morgue", filename, dest_filename])
  1225. fs_transaction.move(filename, dest_filename)