#!/usr/bin/env python
import binascii
import csv
import datetime
from contextlib import closing
import fcntl
try:
    import cPickle as pickle
except ImportError:
    import _pickle as pickle
try:
    import dbhash
except ImportError:
    import dbm as dbhash
import shutil
from multiprocessing import Queue
import os.path
import sys
import time
from xml.etree import ElementTree
import zlib

from MAPI.Util import *

import kopano
from kopano import log_exc

"""
kopano-backup - a MAPI-level backup/restore tool built on python-kopano.

backup is done incrementally using ICS and can be parallelized over stores.
restore is not parallelized.

items are serialized and maintained in per-folder key-value stores.
metadata such as webapp settings, rules, acls and delegation permissions are also stored per-folder.

basic commands (see --help for all options):

kopano-backup -u user1 -> backup (sync) data for user 'user1' in (new) directory 'user1'
kopano-backup -u user1 -f Inbox -> backup 'Inbox' folder for user 'user1' in (new) directory 'user1'

kopano-backup --restore user1 -> restore data from directory 'user1' to account called 'user1'
kopano-backup --restore -u user2 user1 -> same, but restore to account 'user2'

kopano-backup --stats user1 -> summarize contents of backup directory 'user1', in CSV format
kopano-backup --index user1 -> low-level overview of stored items, in CSV format

options can be combined when this makes sense, for example:

kopano-backup --index user1 -f Inbox/subfolder --recursive --period-begin 2014-01-01
"""
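
# on-disk layout produced per store (a sketch, reconstructed from the code below;
# directory names under 'folders/' are folder sourcekeys):
#
#   user1/
#       lock, store, user, delegates    - lockfile, store/user props, delegate users
#       folders/<sourcekey>/
#           path, folder                - original folder path and props
#           acl, rules                  - folder metadata
#           items, index                - per-folder key-value databases
#           state                       - ICS sync state
#           folders/<sourcekey>/...     - subfolders, recursively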

def dbopen(path): # XXX unfortunately dbhash.open doesn't seem to accept unicode
    return dbhash.open(path.encode(sys.stdout.encoding or 'utf8'), 'c')

def _decode(s):
    return s.decode(sys.stdout.encoding or 'utf8')

class BackupWorker(kopano.Worker):
    """ each worker takes stores from a queue, and backs them up to disk (or syncs them),
        according to the given command-line options; it also detects deleted folders """

    def main(self):
        config, server, options = self.service.config, self.service.server, self.service.options
        while True:
            stats = {'changes': 0, 'deletes': 0, 'errors': 0}
            self.service.stats = stats # XXX generalize
            with log_exc(self.log, stats):
                # get store from input queue
                (store_entryid, username, path) = self.iqueue.get()
                store = server.store(entryid=store_entryid)
                user = store.user

                # create main directory and lock it
                if not os.path.isdir(path):
                    os.makedirs(path)
                with open(path+'/lock', 'w') as lockfile:
                    fcntl.flock(lockfile.fileno(), fcntl.LOCK_EX)

                    # backup user and store properties
                    if not options.folders:
                        file(path+'/store', 'w').write(dump_props(store.props(), stats, self.log))
                        if user:
                            file(path+'/user', 'w').write(dump_props(user.props(), stats, self.log))
                            if not options.skip_meta:
                                file(path+'/delegates', 'w').write(dump_delegates(user, server, stats, self.log))

                    # check command-line options and backup folders
                    t0 = time.time()
                    self.log.info('backing up: %s', path)
                    paths = set()
                    folders = list(store.folders())
                    if options.recursive:
                        folders = sum([[f]+list(f.folders()) for f in folders], [])
                    for folder in folders:
                        if (not store.public and \
                            ((options.skip_junk and folder == store.junk) or \
                             (options.skip_deleted and folder == store.wastebasket))):
                            continue
                        paths.add(folder.path)
                        self.backup_folder(path, folder, store.subtree, config, options, stats, user, server)

                    # timestamp deleted folders
                    if not options.folders:
                        path_folder = folder_struct(path, options)
                        for fpath in set(path_folder) - paths:
                            with closing(dbopen(path_folder[fpath]+'/index')) as db_index:
                                idx = db_index.get('folder')
                                d = pickle.loads(idx) if idx else {}
                                if not d.get('backup_deleted'):
                                    self.log.info('deleted folder: %s', fpath)
                                    d['backup_deleted'] = self.service.timestamp
                                    db_index['folder'] = pickle.dumps(d)

                    changes = stats['changes'] + stats['deletes']
                    self.log.info('backing up %s took %.2f seconds (%d changes, ~%.2f/sec, %d errors)',
                        path, time.time()-t0, changes, changes/(time.time()-t0), stats['errors'])

            # return statistics in output queue
            self.oqueue.put(stats)

    def backup_folder(self, path, folder, subtree, config, options, stats, user, server):
        """ backup single folder """

        self.log.info('backing up folder: %s', folder.path)

        # create directory for subfolders
        data_path = path+'/'+folder_path(folder, subtree)
        if not os.path.isdir('%s/folders' % data_path):
            os.makedirs('%s/folders' % data_path)

        # backup folder properties, path, metadata
        file(data_path+'/path', 'w').write(folder.path.encode('utf8'))
        file(data_path+'/folder', 'w').write(dump_props(folder.props(), stats, self.log))
        if not options.skip_meta:
            file(data_path+'/acl', 'w').write(dump_acl(folder, user, server, stats, self.log))
            file(data_path+'/rules', 'w').write(dump_rules(folder, user, server, stats, self.log))
        if options.only_meta:
            return

        # sync over ICS, using stored 'state'
        importer = FolderImporter(folder, data_path, config, options, self.log, stats, self.service)
        statepath = '%s/state' % data_path
        state = None
        if os.path.exists(statepath):
            state = file(statepath).read()
            self.log.info('found previous folder sync state: %s', state)
        new_state = folder.sync(importer, state, log=self.log, stats=stats, begin=options.period_begin, end=options.period_end)
        if new_state != state:
            file(statepath, 'w').write(new_state)
            self.log.info('saved folder sync state: %s', new_state)

class FolderImporter:
    """ tracks changes for a given folder """

    def __init__(self, *args):
        self.folder, self.folder_path, self.config, self.options, self.log, self.stats, self.service = args

    def update(self, item, flags):
        """ store updated item in 'items' database, and subject and date in 'index' database """

        with log_exc(self.log, self.stats):
            self.log.debug('folder %s: new/updated document with entryid %s, sourcekey %s', self.folder.sourcekey, item.entryid, item.sourcekey)
            with closing(dbopen(self.folder_path+'/items')) as db:
                db[item.sourcekey] = zlib.compress(item.dumps(attachments=not self.options.skip_attachments, archiver=False, skip_broken=True))
            with closing(dbopen(self.folder_path+'/index')) as db:
                orig_prop = item.get_prop(PR_EC_BACKUP_SOURCE_KEY)
                if orig_prop:
                    orig_prop = orig_prop.value.encode('hex').upper()
                db[item.sourcekey] = pickle.dumps({
                    'subject': item.subject,
                    'orig_sourcekey': orig_prop,
                    'last_modified': item.last_modified,
                    'backup_updated': self.service.timestamp,
                })
            self.stats['changes'] += 1

    def delete(self, item, flags):
        """ delete item from 'items' and 'index' databases """

        with log_exc(self.log, self.stats):
            with closing(dbopen(self.folder_path+'/items')) as db_items:
                with closing(dbopen(self.folder_path+'/index')) as db_index:
                    if item.sourcekey in db_items: # ICS can generate delete events without update events..
                        self.log.debug('folder %s: deleted document with sourcekey %s', self.folder.sourcekey, item.sourcekey)
                        if self.options.deletes in (None, 'yes'):
                            idx = pickle.loads(db_index[item.sourcekey])
                            idx['backup_deleted'] = self.service.timestamp
                            db_index[item.sourcekey] = pickle.dumps(idx)
                        else:
                            del db_items[item.sourcekey]
                            del db_index[item.sourcekey]
                        self.stats['deletes'] += 1
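
# note: with --deletes at its default ('yes'), deletions are soft: items and folders
# are only marked with a 'backup_deleted' timestamp in the 'index' database, so they
# remain restorable; --purge N permanently removes anything marked more than N days
# ago (see Service.purge below)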

class Service(kopano.Service):
    """ main backup process """

    def main(self):
        self.timestamp = datetime.datetime.now()

        if self.options.restore or self.options.purge:
            data_path = _decode(self.args[0].rstrip('/'))
            with open(data_path+'/lock', 'w') as lockfile:
                fcntl.flock(lockfile.fileno(), fcntl.LOCK_EX)
                if self.options.restore:
                    self.restore(data_path)
                elif self.options.purge:
                    self.purge(data_path)
        else:
            self.backup()

    def backup(self):
        """ create backup workers, determine which stores to queue, start the workers, display statistics """

        self.iqueue, self.oqueue = Queue(), Queue()
        workers = [BackupWorker(self, 'backup%d' % i, nr=i, iqueue=self.iqueue, oqueue=self.oqueue)
                       for i in range(self.config['worker_processes'])]
        for worker in workers:
            worker.start()
        jobs = self.create_jobs()
        for job in jobs:
            self.iqueue.put(job)
        self.log.info('queued %d store(s) for parallel backup (%s processes)', len(jobs), len(workers))
        t0 = time.time()
        stats = [self.oqueue.get() for i in range(len(jobs))] # blocking
        changes = sum(s['changes'] + s['deletes'] for s in stats)
        errors = sum(s['errors'] for s in stats)
        self.log.info('queue processed in %.2f seconds (%d changes, ~%.2f/sec, %d errors)',
            (time.time()-t0), changes, changes/(time.time()-t0), errors)

    def restore(self, data_path):
        """ restore data from backup """

        # determine store to restore to
        self.data_path = data_path # XXX remove var
        self.log.info('starting restore of %s', self.data_path)
        username = os.path.split(self.data_path)[1]
        if self.options.users:
            store = self._store(self.options.users[0])
        elif self.options.stores:
            store = self.server.store(self.options.stores[0])
        else:
            store = self._store(username)
        user = store.user

        # start restore
        self.log.info('restoring to store %s', store.entryid)
        t0 = time.time()
        stats = {'changes': 0, 'errors': 0}

        # restore metadata (webapp/mapi settings)
        if user and not self.options.folders and not self.options.skip_meta:
            if os.path.exists('%s/store' % self.data_path):
                storeprops = pickle.loads(file('%s/store' % self.data_path).read())
                for proptag in (PR_EC_WEBACCESS_SETTINGS_JSON, PR_EC_OUTOFOFFICE_SUBJECT, PR_EC_OUTOFOFFICE_MSG,
                                PR_EC_OUTOFOFFICE, PR_EC_OUTOFOFFICE_FROM, PR_EC_OUTOFOFFICE_UNTIL):
                    if PROP_TYPE(proptag) == PT_TSTRING:
                        proptag = CHANGE_PROP_TYPE(proptag, PT_UNICODE)
                    value = storeprops.get(proptag)
                    if value:
                        store.mapiobj.SetProps([SPropValue(proptag, value)])
                store.mapiobj.SaveChanges(KEEP_OPEN_READWRITE)
            if os.path.exists('%s/delegates' % self.data_path):
                load_delegates(user, self.server, file('%s/delegates' % self.data_path).read(), stats, self.log)

        # determine stored and specified folders
        path_folder = folder_struct(self.data_path, self.options)
        paths = self.options.folders or sorted(path_folder.keys())
        if self.options.recursive:
            paths = [path2 for path2 in path_folder for path in paths if (path2+'//').startswith(path+'/')]

        # restore specified folders
        restored = []
        for path in paths:
            if path not in path_folder:
                self.log.error('no such folder: %s', path)
                stats['errors'] += 1
            else:
                # handle --restore-root, filter and start restore
                restore_path = _decode(self.options.restore_root)+'/'+path if self.options.restore_root else path
                folder = store.subtree.folder(restore_path, create=True)
                if (not store.public and \
                    ((self.options.skip_junk and folder == store.junk) or \
                     (self.options.skip_deleted and folder == store.wastebasket))):
                    continue
                data_path = path_folder[path]
                if self.options.deletes in (None, 'no') and folder_deleted(data_path):
                    continue
                if not self.options.only_meta:
                    self.restore_folder(folder, path, data_path, store, store.subtree, stats, user, self.server)
                restored.append((folder, data_path))

        # restore metadata
        if not (self.options.sourcekeys or self.options.skip_meta):
            self.log.info('restoring metadata')
            for (folder, data_path) in restored:
                load_acl(folder, user, self.server, file(data_path+'/acl').read(), stats, self.log)
                load_rules(folder, user, self.server, file(data_path+'/rules').read(), stats, self.log)

        self.log.info('restore completed in %.2f seconds (%d changes, ~%.2f/sec, %d errors)',
            time.time()-t0, stats['changes'], stats['changes']/(time.time()-t0), stats['errors'])

    def purge(self, data_path):
        """ permanently delete old folders/items from backup """

        assert not self.options.folders, 'cannot combine --folder with --purge'

        stats = {'folders': 0, 'items': 0}
        self.data_path = data_path # XXX remove var
        path_folder = folder_struct(self.data_path, self.options)
        for path, data_path in path_folder.items():
            # check if folder was deleted
            self.log.info('checking folder: %s', path)
            if folder_deleted(data_path):
                if (self.timestamp - folder_deleted(data_path)).days > self.options.purge:
                    self.log.debug('purging folder')
                    shutil.rmtree(data_path)
                    stats['folders'] += 1
            else: # check all items for deletion
                with closing(dbopen(data_path+'/items')) as db_items:
                    with closing(dbopen(data_path+'/index')) as db_index:
                        for item, idx in db_index.items():
                            d = pickle.loads(idx)
                            backup_deleted = d.get('backup_deleted')
                            if backup_deleted and (self.timestamp - backup_deleted).days > self.options.purge:
                                self.log.debug('purging item: %s', item)
                                stats['items'] += 1
                                del db_items[item]
                                del db_index[item]
        self.log.info('purged %d folders and %d items', stats['folders'], stats['items'])

    def create_jobs(self):
        """ check command-line options and determine which stores should be backed up """

        output_dir = self.options.output_dir or u''
        jobs = []

        # specified companies/all users
        if self.options.companies or not (self.options.users or self.options.stores):
            for company in self.server.companies():
                companyname = company.name if company.name != 'Default' else ''
                for user in company.users():
                    if user.store:
                        jobs.append((user.store, user.name, os.path.join(output_dir, companyname, user.name)))
                if company.public_store and not self.options.skip_public:
                    target = 'public@'+companyname if companyname else 'public'
                    jobs.append((company.public_store, None, os.path.join(output_dir, companyname, target)))

        # specified users
        if self.options.users:
            for user in self.server.users():
                if user.store:
                    jobs.append((user.store, user.name, os.path.join(output_dir, user.name)))

        # specified stores
        if self.options.stores:
            for store in self.server.stores():
                if store.public:
                    target = 'public' + ('@'+store.company.name if store.company.name != 'Default' else '')
                else:
                    target = store.entryid
                jobs.append((store, None, os.path.join(output_dir, target)))

        # sort by store size, largest first, so the biggest jobs are dispatched earliest
        return [(job[0].entryid,)+job[1:] for job in sorted(jobs, reverse=True, key=lambda x: x[0].size)]

    def restore_folder(self, folder, path, data_path, store, subtree, stats, user, server):
        """ restore single folder (or item in folder) """

        # check --sourcekey option (only restore specified item if it exists)
        if self.options.sourcekeys:
            with closing(dbopen(data_path+'/items')) as db:
                if not [sk for sk in self.options.sourcekeys if sk in db]:
                    return
        else:
            self.log.info('restoring folder %s', path)

            # restore container class
            folderprops = pickle.loads(file('%s/folder' % data_path).read())
            container_class = folderprops.get(long(PR_CONTAINER_CLASS_W))
            if container_class:
                folder.container_class = container_class

        # load existing sourcekeys in folder, to check for duplicates
        existing = set()
        table = folder.mapiobj.GetContentsTable(0)
        table.SetColumns([PR_SOURCE_KEY, PR_EC_BACKUP_SOURCE_KEY], 0)
        for row in table.QueryRows(-1, 0):
            if PROP_TYPE(row[1].ulPropTag) != PT_ERROR:
                existing.add(row[1].Value.encode('hex').upper())
            else:
                existing.add(row[0].Value.encode('hex').upper())

        # load entry from 'index', so we don't have to unpickle everything
        with closing(dbopen(data_path+'/index')) as db:
            index = dict((a, pickle.loads(b)) for (a, b) in db.iteritems())

        # now dive into 'items', and restore desired items
        with closing(dbopen(data_path+'/items')) as db:
            # determine sourcekey(s) to restore
            sourcekeys = db.keys()
            if self.options.sourcekeys:
                sourcekeys = [sk for sk in sourcekeys if sk in self.options.sourcekeys]
            for sourcekey2 in sourcekeys:
                with log_exc(self.log, stats):
                    # date check against 'index'
                    last_modified = index[sourcekey2]['last_modified']
                    if ((self.options.period_begin and last_modified < self.options.period_begin) or
                        (self.options.period_end and last_modified >= self.options.period_end) or
                        (index[sourcekey2].get('backup_deleted') and self.options.deletes in (None, 'no'))):
                        continue

                    # check for duplicates
                    if sourcekey2 in existing or index[sourcekey2]['orig_sourcekey'] in existing:
                        self.log.warning('skipping duplicate item with sourcekey %s', sourcekey2)
                    else:
                        # actually restore item
                        self.log.debug('restoring item with sourcekey %s', sourcekey2)
                        item = folder.create_item(loads=zlib.decompress(db[sourcekey2]), attachments=not self.options.skip_attachments)

                        # store original sourcekey or it is lost
                        try:
                            item.prop(PR_EC_BACKUP_SOURCE_KEY)
                        except MAPIErrorNotFound:
                            item.mapiobj.SetProps([SPropValue(PR_EC_BACKUP_SOURCE_KEY, sourcekey2.decode('hex'))])
                            item.mapiobj.SaveChanges(0)

                        stats['changes'] += 1

    def _store(self, username):
        """ lookup store for username """

        if '@' in username:
            u, c = username.split('@')
            if u == 'public' or u == c:
                return self.server.company(c).public_store
            else:
                return self.server.user(username).store
        elif username == 'public':
            return self.server.public_store
        else:
            return self.server.user(username).store

def folder_struct(data_path, options, mapper=None):
    """ determine all folders in backup directory """

    if mapper is None:
        mapper = {}
    if os.path.exists(data_path+'/path'):
        path = file(data_path+'/path').read().decode('utf8')
        mapper[path] = data_path
    if os.path.exists(data_path+'/folders'):
        for f in os.listdir(data_path+'/folders'):
            d = data_path+'/folders/'+f
            if os.path.isdir(d):
                folder_struct(d, options, mapper)
    return mapper

def folder_path(folder, subtree):
    """ determine path to folder in backup directory """

    path = ''
    parent = folder
    while parent and parent != subtree:
        path = '/folders/'+parent.sourcekey+path
        parent = parent.parent
    return path[1:]
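
# e.g. for 'Inbox/subfolder' this yields something like
# 'folders/<inbox sourcekey>/folders/<subfolder sourcekey>'; sourcekeys are stable
# identifiers, so folder renames don't move data around on disk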

def folder_deleted(data_path):
    if os.path.exists(data_path+'/index'):
        with closing(dbhash.open(data_path+'/index')) as db:
            idx = db.get('folder')
            if idx and pickle.loads(idx).get('backup_deleted'):
                return pickle.loads(idx).get('backup_deleted')
    return None

def show_contents(data_path, options):
    """ summary of contents of backup directory, at the item or folder level, in CSV format """

    # setup CSV writer, perform basic checks
    writer = csv.writer(sys.stdout)
    path_folder = folder_struct(data_path, options)
    paths = options.folders or sorted(path_folder)
    for path in paths:
        if path not in path_folder:
            print('no such folder:', path)
            sys.exit(-1)
    if options.recursive:
        paths = [p for p in path_folder if [f for f in paths if p.startswith(f)]]

    # loop over folders
    for path in paths:
        data_path = path_folder[path]
        items = []
        if options.deletes == 'no' and folder_deleted(data_path):
            continue

        # filter items on date using 'index' database
        if os.path.exists(data_path+'/index'):
            with closing(dbhash.open(data_path+'/index')) as db:
                for key, value in db.iteritems():
                    d = pickle.loads(value)
                    if ((key == 'folder') or
                        (options.period_begin and d['last_modified'] < options.period_begin) or
                        (options.period_end and d['last_modified'] >= options.period_end) or
                        (options.deletes == 'no' and d.get('backup_deleted'))):
                        continue
                    items.append((key, d))

        # --stats: one entry per folder
        if options.stats:
            writer.writerow([path.encode(sys.stdout.encoding or 'utf8'), len(items)])

        # --index: one entry per item
        elif options.index:
            items = sorted(items, key=lambda item: item[1]['last_modified'])
            for key, d in items:
                writer.writerow([key, path.encode(sys.stdout.encoding or 'utf8'), d['last_modified'], d['subject'].encode(sys.stdout.encoding or 'utf8')])
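
# illustrative output (values are placeholders):
#   --stats: one CSV row per folder -> Inbox,1205
#   --index: one CSV row per item   -> <sourcekey>,Inbox,2014-01-01 12:00:00,Some subject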

def dump_props(props, stats, log):
    """ dump given MAPI properties """

    data = {}
    with log_exc(log, stats):
        data = dict((prop.proptag, prop.mapiobj.Value) for prop in props)
    return pickle.dumps(data)

def dump_acl(folder, user, server, stats, log):
    """ dump acl for given folder """

    rows = []
    with log_exc(log, stats):
        acl_table = folder.mapiobj.OpenProperty(PR_ACL_TABLE, IID_IExchangeModifyTable, 0, 0)
        table = acl_table.GetTable(0)
        for row in table.QueryRows(-1, 0):
            entryid = row[1].Value
            try:
                row[1].Value = ('user', server.sa.GetUser(entryid, MAPI_UNICODE).Username)
            except MAPIErrorNotFound:
                try:
                    row[1].Value = ('group', server.sa.GetGroup(entryid, MAPI_UNICODE).Groupname)
                except MAPIErrorNotFound:
                    log.warning("skipping access control entry for unknown user/group %s", entryid.encode('hex').upper())
                    continue
            rows.append(row)
    return pickle.dumps(rows)

def load_acl(folder, user, server, data, stats, log):
    """ load acl for given folder """

    with log_exc(log, stats):
        data = pickle.loads(data)
        rows = []
        for row in data:
            try:
                member_type, value = row[1].Value
                if member_type == 'user':
                    entryid = server.user(value).userid
                else:
                    entryid = server.group(value).groupid
                row[1].Value = entryid.decode('hex')
                rows.append(row)
            except kopano.NotFoundError:
                log.warning("skipping access control entry for unknown user/group '%s'", value)
        acltab = folder.mapiobj.OpenProperty(PR_ACL_TABLE, IID_IExchangeModifyTable, 0, MAPI_MODIFY)
        acltab.ModifyTable(0, [ROWENTRY(ROW_ADD, row) for row in rows])

def dump_rules(folder, user, server, stats, log):
    """ dump rules for given folder """

    ruledata = None
    with log_exc(log, stats):
        try:
            ruledata = folder.prop(PR_RULES_DATA).value
        except MAPIErrorNotFound:
            pass
        else:
            etxml = ElementTree.fromstring(ruledata)
            for actions in etxml.findall('./item/item/actions'):
                for movecopy in actions.findall('.//moveCopy'):
                    try:
                        s = movecopy.findall('store')[0]
                        store = server.mapisession.OpenMsgStore(0, s.text.decode('base64'), None, 0)
                        guid = HrGetOneProp(store, PR_STORE_RECORD_KEY).Value.encode('hex')
                        store = server.store(guid) # XXX guid doesn't work for multiserver?
                        if store.public:
                            s.text = 'public'
                        else:
                            s.text = store.user.name if store != user.store else ''
                        f = movecopy.findall('folder')[0]
                        path = store.folder(entryid=f.text.decode('base64').encode('hex')).path
                        f.text = path
                    except (kopano.NotFoundError, MAPIErrorNotFound, binascii.Error):
                        log.warning("cannot serialize rule for unknown store/folder")
            ruledata = ElementTree.tostring(etxml)
    return pickle.dumps(ruledata)

def load_rules(folder, user, server, data, stats, log):
    """ load rules for given folder """

    with log_exc(log, stats):
        data = pickle.loads(data)
        if data:
            etxml = ElementTree.fromstring(data)
            for actions in etxml.findall('./item/item/actions'):
                for movecopy in actions.findall('.//moveCopy'):
                    try:
                        s = movecopy.findall('store')[0]
                        if s.text == 'public':
                            store = server.public_store
                        else:
                            store = server.user(s.text).store if s.text else user.store
                        s.text = store.entryid.decode('hex').encode('base64').strip()
                        f = movecopy.findall('folder')[0]
                        f.text = store.folder(f.text).entryid.decode('hex').encode('base64').strip()
                    except kopano.NotFoundError:
                        log.warning("skipping rule for unknown store/folder")
            etxml = ElementTree.tostring(etxml)
            folder.create_prop(PR_RULES_DATA, etxml)
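
# delegate information lives on the freebusy message referenced from the store root
# (PR_FREEBUSY_ENTRYIDS), as a list of user entryids in PR_SCHDINFO_DELEGATE_ENTRYIDS;
# the helpers below translate these to/from usernames, so backups stay portable
# across stores/servers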

def _get_fbf(user, flags, log):
    try:
        fbeid = user.root.prop(PR_FREEBUSY_ENTRYIDS).value[1]
        return user.store.mapiobj.OpenEntry(fbeid, None, flags)
    except MAPIErrorNotFound:
        log.warning("skipping delegation because of missing freebusy data")

def dump_delegates(user, server, stats, log):
    """ dump delegate users for given user """

    usernames = []
    with log_exc(log, stats):
        fbf = _get_fbf(user, 0, log)
        delegate_uids = []
        try:
            if fbf:
                delegate_uids = HrGetOneProp(fbf, PR_SCHDINFO_DELEGATE_ENTRYIDS).Value
        except MAPIErrorNotFound:
            pass
        for uid in delegate_uids:
            try:
                usernames.append(server.sa.GetUser(uid, MAPI_UNICODE).Username)
            except MAPIErrorNotFound:
                log.warning("skipping delegate user for unknown userid")
    return pickle.dumps(usernames)

def load_delegates(user, server, data, stats, log):
    """ load delegate users for given user """

    with log_exc(log, stats):
        userids = []
        for name in pickle.loads(data):
            try:
                userids.append(server.user(name).userid.decode('hex'))
            except kopano.NotFoundError:
                log.warning("skipping delegation for unknown user '%s'", name)
        fbf = _get_fbf(user, MAPI_MODIFY, log)
        if fbf:
            fbf.SetProps([SPropValue(PR_SCHDINFO_DELEGATE_ENTRYIDS, userids)])
            fbf.SaveChanges(0)

def main():
    # select common options
    parser = kopano.parser('ckpsufwUPCSlObe', usage='kopano-backup [PATH] [options]')

    # add custom options
    parser.add_option('', '--skip-junk', dest='skip_junk', action='store_true', help='skip junk folder')
    parser.add_option('', '--skip-deleted', dest='skip_deleted', action='store_true', help='skip deleted items folder')
    parser.add_option('', '--skip-public', dest='skip_public', action='store_true', help='skip public store')
    parser.add_option('', '--skip-attachments', dest='skip_attachments', action='store_true', help='skip attachments')
    parser.add_option('', '--skip-meta', dest='skip_meta', action='store_true', help='skip metadata')
    parser.add_option('', '--only-meta', dest='only_meta', action='store_true', help='only backup/restore metadata')
    parser.add_option('', '--deletes', dest='deletes', help='store/restore deleted items/folders', metavar='YESNO')
    parser.add_option('', '--purge', dest='purge', type='int', help='purge items/folders deleted more than N days ago', metavar='N')
    parser.add_option('', '--restore', dest='restore', action='store_true', help='restore from backup')
    parser.add_option('', '--restore-root', dest='restore_root', help='restore under specific folder', metavar='PATH')
    parser.add_option('', '--stats', dest='stats', action='store_true', help='list folders for PATH')
    parser.add_option('', '--index', dest='index', action='store_true', help='list items for PATH')
    parser.add_option('', '--sourcekey', dest='sourcekeys', action='append', help='restore specific sourcekey', metavar='SOURCEKEY')
    parser.add_option('', '--recursive', dest='recursive', action='store_true', help='backup/restore folders recursively')

    # parse and check command-line options
    options, args = parser.parse_args()
    options.service = False
    if options.restore or options.stats or options.index or options.purge:
        assert len(args) == 1 and os.path.isdir(args[0]), 'please specify path to backup data'
    else:
        assert len(args) == 0, 'too many arguments'
    if options.deletes and options.deletes not in ('yes', 'no'):
        raise Exception("--deletes option takes 'yes' or 'no'")

    if options.stats or options.index:
        # handle --stats/--index
        show_contents(args[0], options)
    else:
        # start backup/restore
        Service('backup', options=options, args=args).start()

if __name__ == '__main__':
    main()