Kolab:Winterfell / bonnie
Changes of Revision 4
bonnie.spec
Changed
@@ -17,7 +17,7 @@ %global bonnie_group_id 415
 
 Name: bonnie
-Version: 0.3.4
+Version: 0.3.5
 Release: 1%{?dist}
 Summary: Bonnie for Kolab Groupware
 
@@ -145,7 +145,9 @@
     %{buildroot}/%{_sysconfdir}/%{name} \
     %{buildroot}/%{_bindir} \
     %{buildroot}/%{_sbindir} \
-    %{buildroot}/%{python_sitelib}
+    %{buildroot}/%{python_sitelib} \
+    %{buildroot}/%{_var}/lib/%{name} \
+    %{buildroot}/%{_var}/log/%{name}
 
 %{__install} -m640 -p conf/bonnie.conf %{buildroot}/%{_sysconfdir}/%{name}
 
@@ -203,6 +205,8 @@
 %pre -n bonnie-collector
 # And allow cyrus access to bonnie.conf
 gpasswd -a cyrus bonnie >/dev/null 2>&1
+# Allow bonnie access to kolab.conf
+gpasswd -a bonnie kolab >/dev/null 2>&1
 
 %pre -n bonnie-dealer
 # And allow cyrus access to bonnie.conf
@@ -310,6 +314,8 @@
 %{python_sitelib}/bonnie/translate.py*
 %{python_sitelib}/bonnie/utils.py*
 %{python_sitelib}/bonnie/plugins/
+%attr(0750,%{bonnie_user},%{bonnie_group}) %{_var}/lib/%{name}
+%attr(0750,%{bonnie_user},%{bonnie_group}) %{_var}/log/%{name}
 
 %files elasticsearch
 %defattr(-,root,root,-)
bonnie-0.3.4.tar.gz/bonnie/collector/handlers/imapdata.py -> bonnie-0.3.5.tar.gz/bonnie/collector/handlers/imapdata.py
Changed
@@ -29,6 +29,7 @@
 conf = bonnie.getConf()
 log = bonnie.getLogger('bonnie.collector.IMAPDataHandler')
 
+
 class IMAPDataHandler(object):
     """
     Collector handler to provide metadata from IMAP
bonnie-0.3.4.tar.gz/bonnie/logger.py -> bonnie-0.3.5.tar.gz/bonnie/logger.py
Changed
@@ -28,6 +28,7 @@
 
 from bonnie.translate import _
 
+
 class Logger(logging.Logger):
     """
     The Bonnie version of a logger.
@@ -44,7 +45,7 @@
         for arg in sys.argv:
             value = None
             if '=' in arg:
-                (arg,value) = arg.split('=')[0:2]
+                (arg, value) = arg.split('=')[0:2]
 
             if '-d' == arg or '--debug' == arg:
                 debuglevel = value if value is not None else -1
@@ -53,7 +54,7 @@
             if debuglevel == -1:
                 try:
                     debuglevel = int(arg)
-                except ValueError, errmsg:
+                except ValueError:
                     debuglevel = 0
 
                 continue
@@ -65,8 +66,8 @@
                 continue
 
             if loglevel == -1:
-                if hasattr(logging,arg.upper()):
-                    loglevel = getattr(logging,arg.upper())
+                if hasattr(logging, arg.upper()):
+                    loglevel = getattr(logging, arg.upper())
                 else:
                     loglevel = logging.DEBUG
 
@@ -83,7 +84,7 @@
                 continue
 
     def __init__(self, *args, **kw):
-        if kw.has_key('name'):
+        if 'name' in kw:
             name = kw['name']
         elif len(args) == 1:
             name = args[0]
@@ -92,7 +93,9 @@
 
         logging.Logger.__init__(self, name)
 
-        plaintextformatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
+        plaintextformatter = logging.Formatter(
+            "%(asctime)s %(name)s %(levelname)s %(message)s"
+        )
 
         if not self.fork:
             self.console_stdout = logging.StreamHandler(sys.stdout)
@@ -100,7 +103,7 @@
 
             self.addHandler(self.console_stdout)
 
-        if kw.has_key('logfile'):
+        if 'logfile' in kw:
             self.logfile = kw['logfile']
         elif self.logfile is None:
             self.logfile = '/var/log/bonnie/bonnie.log'
@@ -111,7 +114,7 @@
         try:
             (ruid, euid, suid) = os.getresuid()
             (rgid, egid, sgid) = os.getresgid()
-        except AttributeError, errmsg:
+        except AttributeError:
             ruid = os.getuid()
             rgid = os.getgid()
@@ -123,8 +126,8 @@
                         pwd.getpwnam('cyrus')[2],
                         grp.getgrnam('mail')[2]
                     )
-                    os.chmod(self.logfile, 0660)
-                except:
+                    os.chmod(self.logfile, 660)
+                except Exception:
                     pass
 
         # Make sure the log file exists
@@ -138,13 +141,18 @@
             try:
                 filelog_handler = logging.FileHandler(filename=self.logfile)
                 filelog_handler.setFormatter(plaintextformatter)
-            except IOError, e:
-                print >> sys.stderr, _("Cannot log to file %s: %s") % (self.logfile, e)
+            except IOError, errmsg:
+                print >> sys.stderr, _(
+                    "Cannot log to file %s: %s"
+                ) % (
+                    self.logfile,
+                    errmsg
+                )
 
             if not len(self.handlers) > 1:
                 try:
                     self.addHandler(filelog_handler)
-                except:
+                except Exception, errmsg:
                     pass
 
         except IOError, errmsg:
@@ -156,7 +164,8 @@
             self.removeHandler(self.console_stdout)
 
     def info(self, msg, *args):
-        # Suppress info messages from other applications according to debug level
+        # Suppress info messages from other applications according to debug
+        # level
         if self.name.startswith('sqlalchemy') and self.debuglevel < 9:
             return
 
         if not self.name.startswith('bonnie') and self.debuglevel < 8:
             return
@@ -171,7 +180,7 @@
 
         if level <= self.debuglevel:
             # TODO: Not the way it's supposed to work!
-            self.log(logging.DEBUG, '[%d]: %s' % (os.getpid(),msg))
+            self.log(logging.DEBUG, '[%d]: %s' % (os.getpid(), msg))
 
 logging.setLoggerClass(Logger)
bonnie-0.3.4.tar.gz/bonnie/worker/__init__.py -> bonnie-0.3.5.tar.gz/bonnie/worker/__init__.py
Changed
@@ -47,8 +47,8 @@
 
         worker_group.add_option(
             "-n",
-            "--num-childs",
-            dest = "num_childs",
+            "--num-children",
+            dest = "num_children",
             action = "store",
             default = None,
             help = "Number of child processes to spawn"
@@ -56,7 +56,7 @@
 
         super(BonnieWorker, self).__init__(*args, **kw)
 
-        self.childs = []
+        self.children = []
         self.manager = False
         self.running = False
@@ -66,13 +66,13 @@
         :class:`worker processes <bonnie.worker.BonnieWorkerProcess>`
         """
-        num_childs = conf.num_childs or conf.get('worker', 'num_childs')
-        if num_childs is not None:
-            num_childs = int(num_childs)
+        num_children = conf.num_children or conf.get('worker', 'num_children')
+        if num_children is not None:
+            num_children = int(num_children)
 
-        if num_childs is None or num_childs < 1:
+        if num_children is None or num_children < 1:
             main = BonnieWorkerProcess()
-            self.childs.append(main)
+            self.children.append(main)
             main.run() # blocking
         else:
             conf.fork_mode = False
@@ -81,16 +81,16 @@
 
             while self.running:
                 # (re)start child worker processes
-                while len(self.childs) < num_childs:
+                while len(self.children) < num_children:
                     p = Process(target=self.run_child)
-                    self.childs.append(p)
+                    self.children.append(p)
                     p.start()
 
                 # check states of child processes
-                for p in self.childs:
+                for p in self.children:
                     if not p.is_alive():
                         log.info("Restarting dead worker process %r", p.pid)
-                        self.childs.remove(p)
+                        self.children.remove(p)
 
                 time.sleep(10)
@@ -108,11 +108,11 @@
         Stop the worker daemon.
         """
         self.running = False
-        for p in self.childs:
+        for p in self.children:
             p.terminate()
 
         if self.manager:
-            for p in self.childs:
+            for p in self.children:
                 p.join()
 
 class BonnieWorkerProcess(object):
bonnie-0.3.4.tar.gz/bonnie/worker/outputs/elasticsearch_output.py -> bonnie-0.3.5.tar.gz/bonnie/worker/outputs/elasticsearch_output.py
Changed
@@ -50,7 +50,7 @@
     def register(self, callback):
         self.worker = callback({'_all': { 'callback': self.run }})
 
-    def notification2log(self, notification):
+    def notification_to_log(self, notification):
         """
         Convert the given event notification record into a valid log entry
         """
@@ -96,7 +96,10 @@
         # The output should have UTC timestamps, but gets "2014-05-16T12:55:53.870+02:00"
         try:
             timestamp = parse(notification['timestamp']).astimezone(tzutc())
-        except:
+        except Exception, errmsg:
+            import traceback
+            log.error("Exception %r" % (errmsg))
+            log.error("%s" % (traceback.format_exc()))
             timestamp = datetime.datetime.now(tzutc())
 
         notification['@timestamp'] = datetime.datetime.strftime(timestamp, "%Y-%m-%dT%H:%M:%S.%fZ")
@@ -118,25 +121,25 @@
                 notification.pop('vnd.cmu.envelope', None)
 
                 try:
-                    self.es.create(
+                    self.es.index(
                         index=index,
                         doc_type='logs',
-                        body=self.notification2log(notification)
+                        body=self.notification_to_log(notification)
                     )
                 except Exception, e:
-                    log.warning("ES create exception: %r", e)
+                    log.warning("ES index(create or update) exception: %r", e)
                     jobs.append(b'POSTPONE')
                     break
 
         else:
             try:
-                self.es.create(
+                self.es.index(
                     index=index,
                     doc_type='logs',
-                    body=self.notification2log(notification)
+                    body=self.notification_to_log(notification)
                 )
             except Exception, e:
-                log.warning("ES create exception: %r", e)
+                log.warning("ES index(create or update) exception: %r", e)
                 jobs.append(b'POSTPONE')
 
         return (notification, jobs)
bonnie-0.3.4.tar.gz/bonnie/worker/outputs/riak_output.py -> bonnie-0.3.5.tar.gz/bonnie/worker/outputs/riak_output.py
Changed
@@ -50,7 +50,7 @@
     def register(self, callback):
         self.worker = callback({'_all': { 'callback': self.run }})
 
-    def notification2log(self, notification):
+    def notification_to_log(self, notification):
         """
         Convert the given event notification record into a valid log entry
         """
@@ -121,7 +121,7 @@
                     self.riak.create(
                         index=index,
                         doc_type='logs',
-                        body=self.notification2log(notification)
+                        body=self.notification_to_log(notification)
                     )
                 except Exception, errmsg:
                     log.warning("Riak create exception: %r", e)
@@ -133,7 +133,7 @@
                 self.riak.create(
                     index=index,
                     doc_type='logs',
-                    body=self.notification2log(notification)
+                    body=self.notification_to_log(notification)
                 )
             except Exception, errmsg:
                 log.warning("Riak create exception: %r", e)
bonnie-0.3.4.tar.gz/bonnie/worker/storage/elasticsearch_storage.py -> bonnie-0.3.5.tar.gz/bonnie/worker/storage/elasticsearch_storage.py
Changed
@@ -84,7 +84,7 @@
             }
         )
 
-    def get(self, key, index=None, doctype=None, fields=None, **kw):
+    def get(self, key, index=None, doctype=None, stored_fields=None, **kw):
         """
         Standard API for accessing key/value storage
         """
@@ -95,7 +95,7 @@
                 index = _index,
                 doc_type = _doctype,
                 id = key,
-                _source_include = fields or '*'
+                _source_include = stored_fields or '*'
             )
 
             log.debug(
@@ -143,7 +143,7 @@
                 index = _index,
                 doc_type = _doctype,
                 id = key,
-                fields = None
+                stored_fields = None
            )
 
             log.debug(
@@ -165,13 +165,11 @@
 
         if existing is None:
             try:
-                ret = self.es.create(
+                ret = self.es.index(
                     index = _index,
                     doc_type = _doctype,
                     id = key,
-                    body = value,
-                    consistency = 'one',
-                    replication = 'async'
+                    body = value
                 )
 
                 log.debug(
@@ -185,7 +183,7 @@
                 )
 
             except Exception, errmsg:
-                log.warning("ES create exception: %r" % (errmsg))
+                log.warning("ES index(create or update) exception: %r" % (errmsg))
                 ret = None
 
         else:
@@ -194,9 +192,7 @@
                     index = _index,
                     doc_type = _doctype,
                     id = key,
-                    body = { 'doc': value },
-                    consistency = 'one',
-                    replication = 'async'
+                    body = { 'doc': value }
                 )
 
                 log.debug(
@@ -214,7 +210,7 @@
 
         return ret
 
-    def select(self, query, index=None, doctype=None, fields=None, sortby=None, limit=None, **kw):
+    def select(self, query, index=None, doctype=None, stored_fields=None, sortby=None, limit=None, **kw):
         """
         Standard API for querying storage
 
@@ -225,7 +221,7 @@
             - a tuple with two values for range queries
         @param index: Index name (i.e. database name)
         @param doctype: Document type (i.e. table name)
-        @param fields: List of fields to retrieve (string, comma-separated)
+        @param stored_fields: List of stored_fields to retrieve (string, comma-separated)
         @param sortby: Fields to be used fort sorting the results (string, comma-separated)
         @param limit: Number of records to return
@@ -233,7 +229,7 @@
         args = dict(
             index = index or self.default_index,
             doc_type = doctype or self.default_doctype,
-            _source_include = fields or '*'
+            _source_include = stored_fields or '*'
         )
 
         if isinstance(query, dict):
@@ -382,7 +378,7 @@
 
         return user_id
 
-    def notificaton2folder(self, notification, attrib='uri'):
+    def notification_to_folder(self, notification, attrib='uri'):
         """
         Turn the given notification record into a folder document.
         including the computation of a unique identifier which is a
@@ -393,8 +389,10 @@
 
         # re-compose folder uri
         templ = "imap://%(user)s@%(domain)s@%(host)s/"
+
         if uri['user'] is None:
             templ = "imap://%(host)s/"
+
         folder_uri = templ % uri + urllib.quote(uri['path'])
 
         if not notification.has_key('metadata'):
@@ -428,7 +426,7 @@
             'owner': uri['user'] + '@' + uri['domain'] if uri['user'] is not None else 'nobody',
             'server': uri['host'],
-            'name': re.sub('@.+$', '', uri['path']),
+            'name': uri['path'],
             'uri': folder_uri,
         }
@@ -504,9 +502,9 @@
 
         # extract folder properties and a unique identifier from the
         # notification
-        folder = self.notificaton2folder(notification)
+        folder = self.notification_to_folder(notification)
 
-        # abort if notificaton2folder() failed
+        # abort if notification_to_folder() failed
         if folder is False:
             return (notification, [])
 
@@ -515,7 +513,7 @@
             index=self.folders_index,
             doctype=self.folders_doctype,
             key=folder['id'],
-            fields='uniqueid,name'
+            stored_fields='uniqueid,name'
         )
 
         # create an entry for the referenced imap folder
@@ -551,9 +549,7 @@
                         'name': folder['body']['name'],
                         'uri': folder['body']['uri']
                     }
-                },
-                consistency = 'one',
-                replication = 'async'
+                }
             )
 
             log.debug("Updated folder object: %r" % (ret), level=8)
bonnie-0.3.4.tar.gz/bonnie/worker/storage/riak_storage.py -> bonnie-0.3.5.tar.gz/bonnie/worker/storage/riak_storage.py
Changed
@@ -382,7 +382,7 @@
 
         return user_id
 
-    def notificaton2folder(self, notification, attrib='uri'):
+    def notification_to_folder(self, notification, attrib='uri'):
         """
         Turn the given notification record into a folder document.
         including the computation of a unique identifier which is a
@@ -504,9 +504,9 @@
 
         # extract folder properties and a unique identifier from the
         # notification
-        folder = self.notificaton2folder(notification)
+        folder = self.notification_to_folder(notification)
 
-        # abort if notificaton2folder() failed
+        # abort if notification_to_folder() failed
         if folder is False:
             return (notification, [])
bonnie-0.3.4.tar.gz/conf/bonnie.conf -> bonnie-0.3.5.tar.gz/conf/bonnie.conf
Changed
@@ -1,36 +1,79 @@
 [bonnie]
+; Which features do we enable?
+;
+; Valid options: 'archive', 'audit', 'backup', 'dlp'
+;
+; Supported options: None
 features = archive, audit, backup, dlp
 
 [broker]
+
+; The broker's dealer router bind address
 zmq_dealer_router_bind_address = tcp://*:5570
+
+; The broker's collector router bind address
 zmq_collector_router_bind_address = tcp://*:5571
+
+; The broker's worker controller bind address
 zmq_worker_controller_router_bind_address = tcp://*:5572
+
+; The broker's worker router bind address
 zmq_worker_router_bind_address = tcp://*:5573
+
 zmq_poller_timeout = 100
-state_sql_uri = sqlite:////var/lib/bonnie/state.db
+;state_sql_uri = sqlite://
+state_sql_uri = mysql://bonnie:Welcome2KolabSystems@127.0.0.1/bonnie
 
 [collector]
+; Size of the worker queue -- take at most this many jobs at any one time.
 num_threads = 5
+
+; The input
 input_modules = zmq_input
+
+; Where's the input's broker?
 zmq_broker_address = tcp://localhost:5571
+
+; Not currently used
 zmq_poller_timeout = 100
 
 [dealer]
+; How do we output our notifications?
 output_modules = zmq_output
+
+; And where to?
 zmq_broker_address = tcp://localhost:5570
-blacklist_users = cyrus-admin
+
+; Blacklist the following events for <blacklist_users> -- prevents infinite
+; loops.
 blacklist_events = Login,Logout,AclChange
+blacklist_users = cyrus-admin
 input_exclude_events =
 
 [worker]
-num_childs = 0
+; The number of child processes to spawn.
+num_children = 0
+
+; How do we get our notifications to work on?
 input_modules = zmq_input
+
+; What do we use to store intermediate data?
 storage_modules = elasticsearch_storage
+
+; What do we use to put the result of what we do?
 output_modules = elasticsearch_output
+
+; Supply a comma or comma-space separated list of events we need not put out.
 output_exclude_events = MessageExpunge
+
+; Where's our controller?
 zmq_controller_address = tcp://localhost:5572
+
+; Where's our router?
 zmq_worker_router_address = tcp://localhost:5573
+
 zmq_poller_timeout = 100
+
 elasticsearch_output_address = localhost
 elasticsearch_storage_address = localhost
bonnie-0.3.4.tar.gz/contrib/bonnie-broker.sysconfig -> bonnie-0.3.5.tar.gz/contrib/bonnie-broker.sysconfig
Changed
@@ -2,5 +2,4 @@
 #
 # See bonnie-broker --help for more flags.
 #
-FLAGS="--fork -l warning"
-DAEMONOPTS="--user bonnie"
+FLAGS="-l info --logfile /var/log/bonnie/broker.log"
bonnie-0.3.4.tar.gz/contrib/bonnie-broker.systemd -> bonnie-0.3.5.tar.gz/contrib/bonnie-broker.systemd
Changed
@@ -1,16 +1,12 @@
 [Unit]
-Description=Wallace Content Filter
+Description=Bonnie Broker
 After=syslog.target network.target
 
 [Service]
-Type=forking
-PIDFile=/run/bonnie/bonnie-broker.pid
 User=bonnie
 Group=bonnie
 EnvironmentFile=/etc/sysconfig/bonnie-broker
-ExecStart=/usr/sbin/bonnie-broker $FLAGS --pid-file /run/bonnie/bonnie-broker.pid
-ExecReload=/bin/kill -HUP $MAINPID
-ExecStop=/bin/kill -TERM $MAINPID
+ExecStart=/usr/sbin/bonnie-broker $FLAGS
 
 [Install]
 WantedBy=multi-user.target
bonnie-0.3.4.tar.gz/contrib/bonnie-collector.sysconfig -> bonnie-0.3.5.tar.gz/contrib/bonnie-collector.sysconfig
Changed
@@ -1,6 +1,5 @@
-# Configuration file for the Bonnie Broker daemon service.
+# Configuration file for the Bonnie Collector daemon service.
 #
-# See bonnie-broker --help for more flags.
+# See bonnie-collector --help for more flags.
 #
-FLAGS="--fork -l warning"
-DAEMONOPTS="--user bonnie"
+FLAGS="-l info --logfile /var/log/bonnie/collector.log"
bonnie-0.3.4.tar.gz/contrib/bonnie-collector.systemd -> bonnie-0.3.5.tar.gz/contrib/bonnie-collector.systemd
Changed
@@ -1,16 +1,12 @@
 [Unit]
-Description=Wallace Content Filter
+Description=Bonnie Collector
 After=syslog.target network.target
 
 [Service]
-Type=forking
-PIDFile=/run/bonnie/bonnie-collector.pid
 User=bonnie
 Group=bonnie
 EnvironmentFile=/etc/sysconfig/bonnie-collector
-ExecStart=/usr/sbin/bonnie-collector $FLAGS --pid-file /run/bonnie/bonnie-collector.pid
-ExecReload=/bin/kill -HUP $MAINPID
-ExecStop=/bin/kill -TERM $MAINPID
+ExecStart=/usr/sbin/bonnie-collector $FLAGS
 
 [Install]
 WantedBy=multi-user.target
bonnie-0.3.4.tar.gz/contrib/bonnie-worker.sysconfig -> bonnie-0.3.5.tar.gz/contrib/bonnie-worker.sysconfig
Changed
@@ -1,6 +1,5 @@
-# Configuration file for the Bonnie Broker daemon service.
+# Configuration file for the Bonnie Worker daemon service.
 #
-# See bonnie-broker --help for more flags.
+# See bonnie-worker --help for more flags.
 #
-FLAGS="--fork -l warning"
-DAEMONOPTS="--user bonnie"
+FLAGS="-l info --logfile /var/log/bonnie/worker.log"
bonnie-0.3.4.tar.gz/contrib/bonnie-worker.systemd -> bonnie-0.3.5.tar.gz/contrib/bonnie-worker.systemd
Changed
@@ -1,16 +1,12 @@
 [Unit]
-Description=Wallace Content Filter
+Description=Bonnie Worker
 After=syslog.target network.target
 
 [Service]
-Type=forking
-PIDFile=/run/bonnie/bonnie-worker.pid
 User=bonnie
 Group=bonnie
 EnvironmentFile=/etc/sysconfig/bonnie-worker
-ExecStart=/usr/sbin/bonnie-worker $FLAGS --pid-file /run/bonnie/bonnie-worker.pid
-ExecReload=/bin/kill -HUP $MAINPID
-ExecStop=/bin/kill -TERM $MAINPID
+ExecStart=/usr/sbin/bonnie-worker $FLAGS
 
 [Install]
 WantedBy=multi-user.target
bonnie-0.3.4.tar.gz/docs/architecture-and-design.rst -> bonnie-0.3.5.tar.gz/docs/architecture-and-design.rst
Changed
@@ -6,6 +6,15 @@
 Architecture and Design
 =======================
 
+The design principles could be summarized as follows:
+
+* Eventual consistency is more important than real-time availability,
+
+* Scalability is more important than one particular technology's individual
+  efficiency,
+
+* Reliability of the audit trail
+
 Bonnie receives and parses :term:`event notifications` issued by Cyrus IMAP
 2.5.
bonnie-0.3.4.tar.gz/docs/index.rst -> bonnie-0.3.5.tar.gz/docs/index.rst
Changed
@@ -2,13 +2,16 @@
 About Bonnie
 ============
 
-Bonnie is the answer to the questions about :ref:`about-archival`,
+Bonnie builds an audit trail from events that occur against an IMAP server.
+
+As such, Bonnie is the answer to the questions about :ref:`about-archival`,
 :ref:`about-backup-and-restore`, :ref:`about-e-discovery` and
 :ref:`about-data-loss-prevention` for electronic communications.
 
 .. toctree::
     :maxdepth: 1
 
+    getting-started
     architecture-and-design
     technical-documentation/index
 ..
bonnie-0.3.4.tar.gz/tests/unit/test_001_utils.py -> bonnie-0.3.5.tar.gz/tests/unit/test_001_utils.py
Changed
@@ -19,7 +19,6 @@
 #
 
 import os
-import json
 from bonnie.utils import expand_uidset
 from bonnie.utils import parse_imap_uri
 from bonnie.utils import mail_message2dict
@@ -45,11 +44,14 @@
 
     def test_expand_uidset(self):
         self.assertEqual(expand_uidset('3'), ['3'])
-        self.assertEqual(expand_uidset('3,5'), ['3','5'])
-        self.assertEqual(expand_uidset('3:5'), ['3','4','5'])
+        self.assertEqual(expand_uidset('3,5'), ['3', '5'])
+        self.assertEqual(expand_uidset('3:5'), ['3', '4', '5'])
 
     def test_parse_imap_uri(self):
-        url = parse_imap_uri("imap://john.doe@example.org@kolab.example.org/Calendar/Personal%20Calendar;UIDVALIDITY=1411487702/;UID=3")
+        url = parse_imap_uri(
+            "imap://john.doe@example.org@kolab.example.org/Calendar/Personal%20Calendar;UIDVALIDITY=1411487702/;UID=3"
+        )
+
         self.assertEqual(url['host'], 'kolab.example.org')
         self.assertEqual(url['user'], 'john.doe')
         self.assertEqual(url['domain'], 'example.org')
@@ -62,37 +64,69 @@
 
         self.assertEqual(len(headers['From']), 1)
         self.assertEqual(len(headers['To']), 2)
-        self.assertEqual(headers['To'][0], u'Br\u00fcderli, Thomas <thomas.bruederli@example.org>')
+
+        self.assertEqual(
+            headers['To'][0],
+            u'Br\u00fcderli, Thomas <thomas.bruederli@example.org>'
+        )
+
         self.assertEqual(headers['Content-Type'], 'text/plain')
         self.assertEqual(headers['Date'], '2014-09-24T04:52:00Z')
         self.assertEqual(headers['Subject'], 'Test')
 
     def test_mail_message2dict(self):
-        message = mail_message2dict(self._get_resource('event_mime_message.eml'))
+        message = mail_message2dict(
+            self._get_resource('event_mime_message.eml')
+        )
 
         self.assertIsInstance(message, dict)
-        self.assertEqual(message['Subject'], '253E800C973E9FB99D174669001DB19B-FCBB6C4091F28CA0')
-        self.assertEqual(message['X-Kolab-Type'], 'application/x-vnd.kolab.event')
+
+        self.assertEqual(
+            message['Subject'],
+            '253E800C973E9FB99D174669001DB19B-FCBB6C4091F28CA0'
+        )
+
+        self.assertEqual(
+            message['X-Kolab-Type'],
+            'application/x-vnd.kolab.event'
+        )
+
         self.assertEqual(len(message['@parts']), 2)
 
         xmlpart = message['@parts'][1]
-        self.assertEqual(xmlpart['Content-Type'], 'application/calendar+xml; charset=UTF-8; name=kolab.xml')
+
+        self.assertEqual(
+            xmlpart['Content-Type'],
+            'application/calendar+xml; charset=UTF-8; name=kolab.xml'
+        )
 
         message2 = mail_message2dict("FOO")
         self.assertIsInstance(message2, dict)
         self.assertEqual(message2['@body'], "FOO")
 
     def test_imap_folder_path(self):
-        p1 = imap_folder_path("imap://john.doe@example.org@kolab.example.org/Calendar;UID=3")
+        p1 = imap_folder_path(
+            "imap://john.doe@example.org@kolab.example.org/Calendar;UID=3"
+        )
+
         self.assertEqual(p1, "user/john.doe/Calendar@example.org")
 
-        p2 = imap_folder_path("imap://john.doe@example.org@kolab.example.org/INBOX;UIDVALIDITY=1411487702")
+        p2 = imap_folder_path(
+            "imap://john.doe@example.org@kolab.example.org/INBOX;UIDVALIDITY=1411487702"
+        )
+
         self.assertEqual(p2, "user/john.doe@example.org")
 
         # test shared folders (but how are they referred in the uri?)
-        p3 = imap_folder_path("imap://kolab.example.org/Shared%20Folders/shared/Project-X%40example.org;UIDVALIDITY=1412093781/;UID=2")
+        p3 = imap_folder_path(
+            "imap://kolab.example.org/Shared%20Folders/shared/Project-X%40example.org;UIDVALIDITY=1412093781/;UID=2"
+        )
+
         self.assertEqual(p3, "shared/Project-X@example.org")
 
     def test_imap_mailbox_fs_path(self):
-        path = imap_mailbox_fs_path("imap://john.doe@example.org@kolab.example.org/Calendar/Personal%20Calendar;UID=3")
-        self.assertEqual(path, "/var/spool/imap/domain/e/example.org/j/user/john^doe/Calendar/Personal Calendar")
+        path = imap_mailbox_fs_path(
+            "imap://john.doe@example.org@kolab.example.org/Calendar/Personal%20Calendar;UID=3"
+        )
+
+        self.assertEqual(
+            path,
+            "/var/spool/imap/domain/e/example.org/j/user/john^doe/Calendar/Personal Calendar"
+        )