
Merge pull request #241 from jaraco/log-targets

Separate log targets for syslog (and other streams)
Mike Naberezny committed 12 years ago
parent
commit 2ab53646eb

+ 20 - 0
docs/configuration.rst

@@ -844,6 +844,16 @@ where specified.
 
   *Introduced*: 3.0a7
 
+``stdout_syslog``
+
+  If true, stdout will be directed to syslog along with the process name.
+
+  *Default*: False
+
+  *Required*:  No.
+
+  *Introduced*: 3.0b3
+
 ``stderr_logfile``
 
   Put process stderr output in this file unless ``redirect_stderr`` is
@@ -911,6 +921,16 @@ where specified.
 
   *Introduced*: 3.0a7
 
+``stderr_syslog``
+
+  If true, stderr will be directed to syslog along with the process name.
+
+  *Default*: False
+
+  *Required*:  No.
+
+  *Introduced*: 3.0b3
+
 ``environment``
 
   A list of key/value pairs in the form ``KEY="val",KEY2="val2"`` that

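For reference, a minimal ``[program:x]`` stanza using the new options might look like the following. The program name, command, and logfile path are illustrative only and are not part of this change:

    [program:myprog]
    command=/usr/bin/myprog
    ; send each stream to syslog, tagged with the process name
    stdout_syslog=true
    stderr_syslog=true
    ; a file logfile may still be kept alongside syslog
    stdout_logfile=/var/log/myprog-stdout.log
    ; note: stdout_logfile=syslog still works, but is deprecated by this change
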
+ 3 - 6
docs/logging.rst

@@ -134,13 +134,10 @@ The configuration keys that influence child process logging in
 ``[program:x]`` and ``[fcgi-program:x]`` sections are these:
 
 ``redirect_stderr``, ``stdout_logfile``, ``stdout_logfile_maxbytes``,
-``stdout_logfile_backups``, ``stdout_capture_maxbytes``,
+``stdout_logfile_backups``, ``stdout_capture_maxbytes``, ``stdout_syslog``,
 ``stderr_logfile``, ``stderr_logfile_maxbytes``,
-``stderr_logfile_backups`` and ``stderr_capture_maxbytes``.
-
-One may set ``stdout_logfile`` or ``stderr_logfile`` to the
-special string "syslog". In this case, logs will be routed to the
-syslog service instead of being saved to files.
+``stderr_logfile_backups``, ``stderr_capture_maxbytes``, and
+``stderr_syslog``.
 
 ``[eventlistener:x]`` sections may not specify
 ``stdout_capture_maxbytes`` or ``stderr_capture_maxbytes``,

+ 54 - 27
supervisor/dispatchers.py

@@ -1,3 +1,4 @@
+import warnings
 import errno
 from supervisor.medusa.asyncore_25 import compact_traceback
 
@@ -61,9 +62,15 @@ class PDispatcher:
         pass
 
 class POutputDispatcher(PDispatcher):
-    """ Output (stdout/stderr) dispatcher, capture output sent within
-    <!--XSUPERVISOR:BEGIN--><!--XSUPERVISOR:END--> tags and notify
-    with a ProcessCommunicationEvent """
+    """
+    A Process Output (stdout/stderr) dispatcher. Serves several purposes:
+
+    - capture output sent within <!--XSUPERVISOR:BEGIN--> and
+      <!--XSUPERVISOR:END--> tags and signal a ProcessCommunicationEvent
+      by calling notify(event).
+    - route the output to the appropriate log handlers as specified in the
+      config.
+    """
 
     process = None # process which "owns" this dispatcher
     channel = None # 'stderr' or 'stdout'
@@ -74,35 +81,25 @@ class POutputDispatcher(PDispatcher):
     output_buffer = '' # data waiting to be logged
 
     def __init__(self, process, event_type, fd):
+        """
+        Initialize the dispatcher.
+
+        `event_type` should be one of ProcessLogStdoutEvent or
+        ProcessLogStderrEvent
+        """
         self.process = process
         self.event_type = event_type
         self.fd = fd
         self.channel = channel = self.event_type.channel
 
-        logfile = getattr(process.config, '%s_logfile' % channel)
+        self._setup_logging(process.config, channel)
+
         capture_maxbytes = getattr(process.config,
                                    '%s_capture_maxbytes' % channel)
-
-        if logfile:
-            maxbytes = getattr(process.config, '%s_logfile_maxbytes' % channel)
-            backups = getattr(process.config, '%s_logfile_backups' % channel)
-            fmt = '%(message)s'
-            if logfile == 'syslog':
-                fmt = ' '.join((process.config.name, fmt))
-            self.mainlog = process.config.options.getLogger(
-                logfile,
-                loggers.LevelsByName.INFO,
-                fmt=fmt,
-                rotating=not not maxbytes, # optimization
-                maxbytes=maxbytes,
-                backups=backups)
-
         if capture_maxbytes:
-            self.capturelog = self.process.config.options.getLogger(
-                None, # BoundIO
-                loggers.LevelsByName.INFO,
-                '%(message)s',
-                rotating=False,
+            self.capturelog = loggers.handle_boundIO(
+                self.process.config.options.getLogger(),
+                fmt='%(message)s',
                 maxbytes=capture_maxbytes,
                 )
 
@@ -119,6 +116,35 @@ class POutputDispatcher(PDispatcher):
         self.stdout_events_enabled = config.stdout_events_enabled
         self.stderr_events_enabled = config.stderr_events_enabled
 
+    def _setup_logging(self, config, channel):
+        """
+        Configure the main log according to the process' configuration and
+        channel. Sets `mainlog` on self. Returns nothing.
+        """
+
+        logfile = getattr(config, '%s_logfile' % channel)
+        if not logfile:
+            return
+
+        maxbytes = getattr(config, '%s_logfile_maxbytes' % channel)
+        backups = getattr(config, '%s_logfile_backups' % channel)
+        fmt = '%(message)s'
+        if logfile == 'syslog':
+            warnings.warn("Specifying 'syslog' for filename is deprecated. "
+                "Use %s_syslog instead." % channel, DeprecationWarning)
+            fmt = ' '.join((config.name, fmt))
+        self.mainlog = loggers.handle_file(
+            config.options.getLogger(),
+            filename=logfile,
+            fmt=fmt,
+            rotating=not not maxbytes, # optimization
+            maxbytes=maxbytes,
+            backups=backups)
+
+        if getattr(config, '%s_syslog' % channel, False):
+            fmt = config.name + ' %(message)s'
+            loggers.handle_syslog(self.mainlog, fmt)
+
     def removelogs(self):
         for log in (self.mainlog, self.capturelog):
             if log is not None:
@@ -264,13 +290,14 @@ class PEventListenerDispatcher(PDispatcher):
         if logfile:
             maxbytes = getattr(process.config, '%s_logfile_maxbytes' % channel)
             backups = getattr(process.config, '%s_logfile_backups' % channel)
-            self.childlog = process.config.options.getLogger(
+            self.childlog = loggers.handle_file(
+                process.config.options.getLogger(),
                 logfile,
-                loggers.LevelsByName.INFO,
                 '%(message)s',
                 rotating=not not maxbytes, # optimization
                 maxbytes=maxbytes,
-                backups=backups)
+                backups=backups,
+            )
 
     def removelogs(self):
         if self.childlog is not None:

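To illustrate how the refactored dispatcher now builds its main log, here is a standalone sketch of the same composition ``_setup_logging`` performs, using the helpers added to supervisor/loggers.py in this change. The logfile path, process name, and sizes are made-up example values:

    from supervisor import loggers

    # One Logger per channel; handlers are attached by the composable
    # handle_* helpers instead of the old monolithic getLogger().
    logger = loggers.getLogger(loggers.LevelsByName.INFO)

    # File handler for the channel's logfile (rotating when maxbytes is set).
    loggers.handle_file(
        logger,
        filename='/tmp/myprog-stdout.log',   # illustrative path
        fmt='%(message)s',
        rotating=True,
        maxbytes=50 * 1024 * 1024,
        backups=10,
    )

    # When stdout_syslog (or stderr_syslog) is true, a syslog handler is
    # added to the same logger, with the process name prefixed to the format.
    loggers.handle_syslog(logger, 'myprog %(message)s')

    logger.info('hello from the child process\n')
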
+ 31 - 22
supervisor/loggers.py

@@ -320,36 +320,45 @@ class SyslogHandler(Handler):
         except:
             self.handleError(record)
 
-def getLogger(filename, level, fmt, rotating=False, maxbytes=0, backups=0,
-              stdout=False):
+def getLogger(level=None):
+    return Logger(level)
 
-    handlers = []
+_2MB = 1<<21
 
-    logger = Logger(level)
+def handle_boundIO(logger, fmt, maxbytes=_2MB):
+    io = BoundIO(maxbytes)
+    handler = StreamHandler(io)
+    handler.setLevel(logger.level)
+    handler.setFormat(fmt)
+    logger.addHandler(handler)
+    logger.getvalue = io.getvalue
 
-    if filename is None:
-        if not maxbytes:
-            maxbytes = 1<<21 #2MB
-        io = BoundIO(maxbytes)
-        handlers.append(StreamHandler(io))
-        logger.getvalue = io.getvalue
+    return logger
+
+def handle_stdout(logger, fmt):
+    handler = StreamHandler(sys.stdout)
+    handler.setFormat(fmt)
+    handler.setLevel(logger.level)
+    logger.addHandler(handler)
+
+def handle_syslog(logger, fmt):
+    handler = SyslogHandler()
+    handler.setFormat(fmt)
+    handler.setLevel(logger.level)
+    logger.addHandler(handler)
 
-    elif filename == 'syslog':
-        handlers.append(SyslogHandler())
+def handle_file(logger, filename, fmt, rotating=False, maxbytes=0, backups=0):
+    if filename == 'syslog':
+        handler = SyslogHandler()
 
     else:
         if rotating is False:
-            handlers.append(FileHandler(filename))
+            handler = FileHandler(filename)
         else:
-            handlers.append(RotatingFileHandler(filename,'a',maxbytes,backups))
+            handler = RotatingFileHandler(filename, 'a', maxbytes, backups)
 
-    if stdout:
-        handlers.append(StreamHandler(sys.stdout))
-
-    for handler in handlers:
-        handler.setFormat(fmt)
-        handler.setLevel(level)
-        logger.addHandler(handler)
+    handler.setFormat(fmt)
+    handler.setLevel(logger.level)
+    logger.addHandler(handler)
 
     return logger
-

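The capture log (``stdout_capture_maxbytes``/``stderr_capture_maxbytes``) is wired the same way through the new ``handle_boundIO`` helper, which attaches an in-memory BoundIO stream and binds its contents to ``logger.getvalue()``. A small sketch, with the buffer size chosen arbitrarily:

    from supervisor import loggers

    capture = loggers.handle_boundIO(
        loggers.getLogger(loggers.LevelsByName.INFO),
        fmt='%(message)s',
        maxbytes=1 << 20,   # 1MB cap for the in-memory buffer
    )
    capture.info('captured output')
    print(capture.getvalue())
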
+ 24 - 14
supervisor/options.py

@@ -306,7 +306,7 @@ class Options:
 
     def process_config(self, do_usage=True):
         """Process configuration data structure.
-        
+
         This includes reading config file if necessary, setting defaults etc.
         """
         if self.configfile:
@@ -439,10 +439,12 @@ class ServerOptions(Options):
         self.stdout.write('%s\n' % VERSION)
         self.exit(0)
 
-    def getLogger(self, filename, level, fmt, rotating=False, maxbytes=0,
-                  backups=0, stdout=False):
-        return loggers.getLogger(filename, level, fmt, rotating, maxbytes,
-                                 backups, stdout)
+    def getLogger(self, *args, **kwargs):
+        """
+        A proxy to loggers.getLogger so the options might customize log setup.
+        Used by tests to mock log setup.
+        """
+        return loggers.getLogger(*args, **kwargs)
 
     def realize(self, *arg, **kw):
         Options.realize(self, *arg, **kw)
@@ -826,7 +828,7 @@ class ServerOptions(Options):
                 raise ValueError(
                     '%(process_num) must be present within process_name when '
                     'numprocs > 1')
-                    
+
         if stopasgroup and not killasgroup:
             raise ValueError("Cannot set stopasgroup=true and killasgroup=false")
 
@@ -864,6 +866,10 @@ class ServerOptions(Options):
                 maxbytes = byte_size(get(section, mb_key, '50MB'))
                 logfiles[mb_key] = maxbytes
 
+                sy_key = '%s_syslog' % k
+                syslog = boolean(get(section, sy_key, False))
+                logfiles[sy_key] = syslog
+
                 if lf_val is Automatic and not maxbytes:
                     self.parse_warnings.append(
                         'For [%s], AUTO logging used for %s without '
@@ -887,11 +893,13 @@ class ServerOptions(Options):
                 stdout_events_enabled = stdout_events,
                 stdout_logfile_backups=logfiles['stdout_logfile_backups'],
                 stdout_logfile_maxbytes=logfiles['stdout_logfile_maxbytes'],
+                stdout_syslog=logfiles['stdout_syslog'],
                 stderr_logfile=logfiles['stderr_logfile'],
                 stderr_capture_maxbytes = stderr_cmaxbytes,
                 stderr_events_enabled = stderr_events,
                 stderr_logfile_backups=logfiles['stderr_logfile_backups'],
                 stderr_logfile_maxbytes=logfiles['stderr_logfile_maxbytes'],
+                stderr_syslog=logfiles['stderr_syslog'],
                 stopsignal=stopsignal,
                 stopwaitsecs=stopwaitsecs,
                 stopasgroup=stopasgroup,
@@ -1212,7 +1220,7 @@ class ServerOptions(Options):
 
             # always put our primary gid first in this list, otherwise we can
             # lose group info since sometimes the first group in the setgroups
-            # list gets overwritten on the subsequent setgid call (at least on 
+            # list gets overwritten on the subsequent setgid call (at least on
             # freebsd 9 with python 2.7 - this will be safe though for all unix
             # /python version combos)
             groups.insert(0, gid)
@@ -1305,16 +1313,18 @@ class ServerOptions(Options):
 
     def make_logger(self, critical_messages, warn_messages, info_messages):
         # must be called after realize() and after supervisor does setuid()
-        format =  '%(asctime)s %(levelname)s %(message)s\n'
-        self.logger = loggers.getLogger(
+        format = '%(asctime)s %(levelname)s %(message)s\n'
+        self.logger = loggers.getLogger(self.loglevel)
+        if self.nodaemon:
+            loggers.handle_stdout(self.logger, format)
+        loggers.handle_file(
+            self.logger,
             self.logfile,
-            self.loglevel,
             format,
             rotating=True,
             maxbytes=self.logfile_maxbytes,
             backups=self.logfile_backups,
-            stdout = self.nodaemon,
-            )
+        )
         for msg in critical_messages:
             self.logger.critical(msg)
         for msg in warn_messages:
@@ -1625,11 +1635,11 @@ class ProcessConfig(Config):
         'name', 'uid', 'command', 'directory', 'umask', 'priority',
         'autostart', 'autorestart', 'startsecs', 'startretries',
         'stdout_logfile', 'stdout_capture_maxbytes',
-        'stdout_events_enabled',
+        'stdout_events_enabled', 'stdout_syslog',
         'stdout_logfile_backups', 'stdout_logfile_maxbytes',
         'stderr_logfile', 'stderr_capture_maxbytes',
         'stderr_logfile_backups', 'stderr_logfile_maxbytes',
-        'stderr_events_enabled',
+        'stderr_events_enabled', 'stderr_syslog',
         'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup',
         'exitcodes', 'redirect_stderr' ]
     optional_param_names = [ 'environment', 'serverurl' ]

+ 19 - 14
supervisor/tests/base.py

@@ -142,7 +142,7 @@ class DummyOptions:
     def get_pid(self):
         import os
         return os.getpid()
-        
+
     def check_execv_args(self, filename, argv, st):
         if filename == '/bad/filename':
             from supervisor.options import NotFound
@@ -251,6 +251,8 @@ class DummyOptions:
         self.umaskset = mask
 
 class DummyLogger:
+    level = None
+
     def __init__(self):
         self.reopened = False
         self.removed = False
@@ -267,7 +269,10 @@ class DummyLogger:
         if kw:
             msg = msg % kw
         self.data.append(msg)
-        
+
+    def addHandler(self, handler):
+        pass
+
     def reopen(self):
         self.reopened = True
     def close(self):
@@ -350,7 +355,7 @@ class DummySocketManager:
 
     def get_socket(self):
         return DummySocket(self._config.fd)
-        
+
 class DummyProcess:
     # Initial state; overridden by instance variables
     pid = 0 # Subprocess pid; 0 when not running
@@ -547,7 +552,7 @@ def makeExecutable(file, substitutions=None):
     import os
     import sys
     import tempfile
-    
+
     if substitutions is None:
         substitutions = {}
     data = open(file).read()
@@ -556,7 +561,7 @@ def makeExecutable(file, substitutions=None):
     substitutions['PYTHON'] = sys.executable
     for key in substitutions.keys():
         data = data.replace('<<%s>>' % key.upper(), substitutions[key])
-    
+
     tmpnam = tempfile.mktemp(prefix=last)
     f = open(tmpnam, 'w')
     f.write(data)
@@ -638,7 +643,7 @@ class DummyRequest:
 
     def get_server_url(self):
         return 'http://example.com'
-        
+
 
 class DummyRPCInterfaceFactory:
     def __init__(self, supervisord, **config):
@@ -805,9 +810,9 @@ class DummySupervisorRPCNamespace:
             raise Fault(xmlrpc.Faults.NOT_RUNNING, 'NOT_RUNNING')
         if name == 'FAILED':
             raise Fault(xmlrpc.Faults.FAILED, 'FAILED')
-        
+
         return True
-    
+
     def stopAllProcesses(self):
         from supervisor import xmlrpc
         return [
@@ -938,15 +943,15 @@ class DummyProcessGroup:
 
     def stop_all(self):
         self.all_stopped = True
-        
+
     def get_unstopped_processes(self):
         return self.unstopped_processes
 
     def get_dispatchers(self):
         return self.dispatchers
-        
+
 class DummyFCGIProcessGroup(DummyProcessGroup):
-    
+
     def __init__(self, config):
         DummyProcessGroup.__init__(self, config)
         self.socket_manager = DummySocketManager(config.socket_config)
@@ -1014,7 +1019,7 @@ class DummyDispatcher:
         if self.flush_error:
             raise OSError(self.flush_error)
         self.flushed = True
-                
+
 class DummyStream:
     def __init__(self, error=None):
         self.error = error
@@ -1035,7 +1040,7 @@ class DummyStream:
         pass
     def tell(self):
         return len(self.written)
-        
+
 class DummyEvent:
     def __init__(self, serial='abc'):
         if serial is not None:
@@ -1043,7 +1048,7 @@ class DummyEvent:
 
     def __str__(self):
         return 'dummy event'
-        
+
 def dummy_handler(event, result):
     pass
 

+ 11 - 11
supervisor/tests/test_dispatchers.py

@@ -34,7 +34,7 @@ class POutputDispatcherTests(unittest.TestCase):
         process = DummyProcess(config)
         dispatcher = self._makeOne(process)
         self.assertEqual(dispatcher.writable(), False)
-        
+
     def test_readable_open(self):
         options = DummyOptions()
         config = DummyPConfig(options, 'process1', '/bin/process1')
@@ -67,7 +67,7 @@ class POutputDispatcherTests(unittest.TestCase):
         dispatcher = self._makeOne(process)
         self.assertEqual(dispatcher.handle_read_event(), None)
         self.assertEqual(dispatcher.output_buffer, 'abc')
-        
+
     def test_handle_error(self):
         options = DummyOptions()
         config = DummyPConfig(options, 'test', '/test')
@@ -90,7 +90,7 @@ class POutputDispatcherTests(unittest.TestCase):
         process.pid = 4000
         dispatcher = self._makeOne(process)
         dispatcher.capturemode = True
-        dispatcher.capturelog.data = ['hallooo']
+        dispatcher.capturelog.getvalue = lambda: 'hallooo'
         L = []
         def doit(event):
             L.append(event)
@@ -160,7 +160,7 @@ class POutputDispatcherTests(unittest.TestCase):
         self.assertEqual(len(L), 1)
         event = L[0]
         self.assertEqual(event.process, process)
-        self.assertEqual(event.data, 'hello from stdout') 
+        self.assertEqual(event.data, 'hello from stdout')
 
     def test_record_output_does_not_emit_stdout_event_when_disabled(self):
         options = DummyOptions()
@@ -197,7 +197,7 @@ class POutputDispatcherTests(unittest.TestCase):
         self.assertEqual(len(L), 1)
         event = L[0]
         self.assertEqual(event.process, process)
-        self.assertEqual(event.data, 'hello from stderr') 
+        self.assertEqual(event.data, 'hello from stderr')
 
     def test_record_output_does_not_emit_stderr_event_when_disabled(self):
         options = DummyOptions()
@@ -291,7 +291,7 @@ class POutputDispatcherTests(unittest.TestCase):
                 os.remove(logfile)
             except (OSError, IOError):
                 pass
-        
+
     def test_stdout_capturemode_multiple_buffers(self):
         from supervisor.events import ProcessCommunicationEvent
         from supervisor.events import subscribe
@@ -448,8 +448,8 @@ class POutputDispatcherTests(unittest.TestCase):
         self.assertEqual(dispatcher.closed, True)
         dispatcher.close() # make sure we don't error if we try to close twice
         self.assertEqual(dispatcher.closed, True)
-        
-                        
+
+
 class PInputDispatcherTests(unittest.TestCase):
     def _getTargetClass(self):
         from supervisor.dispatchers import PInputDispatcher
@@ -550,7 +550,7 @@ class PInputDispatcherTests(unittest.TestCase):
         process = DummyProcess(None)
         dispatcher = self._makeOne(process)
         self.assertRaises(NotImplementedError, dispatcher.handle_read_event)
-        
+
     def test_handle_error(self):
         options = DummyOptions()
         config = DummyPConfig(options, 'test', '/test')
@@ -609,7 +609,7 @@ class PEventListenerDispatcherTests(unittest.TestCase):
         process = DummyProcess(config)
         dispatcher = self._makeOne(process)
         self.assertEqual(dispatcher.writable(), False)
-        
+
     def test_readable_open(self):
         options = DummyOptions()
         config = DummyPConfig(options, 'process1', '/bin/process1')
@@ -1042,7 +1042,7 @@ class PEventListenerDispatcherTests(unittest.TestCase):
             drepr.find('<supervisor.tests.base.DummyProcess instance at'),
             -1)
         self.assertTrue(drepr.endswith('(stdout)>'), drepr)
-    
+
     def test_close(self):
         options = DummyOptions()
         config = DummyPConfig(options, 'process1', '/bin/process1')

+ 5 - 5
supervisor/tests/test_options.py

@@ -965,7 +965,7 @@ class ServerOptionsTests(unittest.TestCase):
         pconfig = pconfigs[0]
         self.assertEqual(pconfig.stopasgroup, True)
         self.assertEqual(pconfig.killasgroup, True)
-    
+
     def test_processes_from_section_killasgroup_mismatch_w_stopasgroup(self):
         instance = self._makeOne()
         text = lstrip("""\
@@ -1597,10 +1597,10 @@ class TestProcessConfig(unittest.TestCase):
                      'priority', 'autostart', 'autorestart',
                      'startsecs', 'startretries', 'uid',
                      'stdout_logfile', 'stdout_capture_maxbytes',
-                     'stdout_events_enabled',
+                     'stdout_events_enabled', 'stdout_syslog',
                      'stdout_logfile_backups', 'stdout_logfile_maxbytes',
                      'stderr_logfile', 'stderr_capture_maxbytes',
-                     'stderr_events_enabled',
+                     'stderr_events_enabled', 'stderr_syslog',
                      'stderr_logfile_backups', 'stderr_logfile_maxbytes',
                      'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup', 'exitcodes',
                      'redirect_stderr', 'environment'):
@@ -1671,10 +1671,10 @@ class FastCGIProcessConfigTest(unittest.TestCase):
                      'priority', 'autostart', 'autorestart',
                      'startsecs', 'startretries', 'uid',
                      'stdout_logfile', 'stdout_capture_maxbytes',
-                     'stdout_events_enabled',
+                     'stdout_events_enabled', 'stdout_syslog',
                      'stdout_logfile_backups', 'stdout_logfile_maxbytes',
                      'stderr_logfile', 'stderr_capture_maxbytes',
-                     'stderr_events_enabled',
+                     'stderr_events_enabled', 'stderr_syslog',
                      'stderr_logfile_backups', 'stderr_logfile_maxbytes',
                      'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup', 'exitcodes',
                      'redirect_stderr', 'environment'):

+ 9 - 6
supervisor/tests/test_supervisord.py

@@ -62,7 +62,7 @@ class SupervisordTests(unittest.TestCase):
     def tearDown(self):
         from supervisor.events import clear
         clear()
-        
+
     def _getTargetClass(self):
         from supervisor.supervisord import Supervisor
         return Supervisor
@@ -131,7 +131,7 @@ class SupervisordTests(unittest.TestCase):
         process.waitstatus = None, None
         options.pidhistory = {1:process}
         supervisord = self._makeOne(options)
-        
+
         supervisord.reap(once=True)
         self.assertEqual(process.finished, (1,1))
 
@@ -254,14 +254,17 @@ class SupervisordTests(unittest.TestCase):
                 'uid': None, 'stdout_logfile': None, 'stdout_capture_maxbytes': 0,
                 'stdout_events_enabled': False,
                 'stdout_logfile_backups': 0, 'stdout_logfile_maxbytes': 0,
+                'stdout_syslog': False,
                 'stderr_logfile': None, 'stderr_capture_maxbytes': 0,
                 'stderr_events_enabled': False,
                 'stderr_logfile_backups': 0, 'stderr_logfile_maxbytes': 0,
+                'stderr_syslog': False,
                 'redirect_stderr': False,
                 'stopsignal': None, 'stopwaitsecs': 10,
                 'stopasgroup': False,
                 'killasgroup': False,
-                'exitcodes': (0,2), 'environment': None, 'serverurl': None }
+                'exitcodes': (0,2), 'environment': None, 'serverurl': None,
+            }
             result.update(params)
             return ProcessConfig(options, **result)
 
@@ -444,7 +447,7 @@ class SupervisordTests(unittest.TestCase):
         self.assertTrue(isinstance(L[0], events.SupervisorStateChangeEvent))
         self.assertTrue(isinstance(L[1], events.SupervisorStoppingEvent))
         self.assertTrue(isinstance(L[1], events.SupervisorStateChangeEvent))
-        
+
     def test_exit(self):
         options = DummyOptions()
         supervisord = self._makeOne(options)
@@ -506,14 +509,14 @@ class SupervisordTests(unittest.TestCase):
         self.assertEqual(supervisord.ticks[3600], 0)
         self.assertEqual(len(L), 1)
         self.assertEqual(L[-1].__class__, events.Tick5Event)
-        
+
         supervisord.tick(now=61)
         self.assertEqual(supervisord.ticks[5], 60)
         self.assertEqual(supervisord.ticks[60], 60)
         self.assertEqual(supervisord.ticks[3600], 0)
         self.assertEqual(len(L), 3)
         self.assertEqual(L[-1].__class__, events.Tick60Event)
-        
+
         supervisord.tick(now=3601)
         self.assertEqual(supervisord.ticks[5], 3600)
         self.assertEqual(supervisord.ticks[60], 3600)