[Zope-Checkins] SVN: Zope/branches/tseaver-fix_wsgi/src/ZServer/ PEP 8; import normalization; accommodate new internals of ZPublisher.HTTPResponse.

Tres Seaver tseaver at palladion.com
Tue Dec 22 21:14:35 EST 2009


Log message for revision 106988:
  PEP 8; import normalization;  accommodate new internals of ZPublisher.HTTPResponse.

Changed:
  U   Zope/branches/tseaver-fix_wsgi/src/ZServer/HTTPResponse.py
  U   Zope/branches/tseaver-fix_wsgi/src/ZServer/tests/test_responses.py

-=-
Modified: Zope/branches/tseaver-fix_wsgi/src/ZServer/HTTPResponse.py
===================================================================
--- Zope/branches/tseaver-fix_wsgi/src/ZServer/HTTPResponse.py	2009-12-23 01:52:01 UTC (rev 106987)
+++ Zope/branches/tseaver-fix_wsgi/src/ZServer/HTTPResponse.py	2009-12-23 02:14:35 UTC (rev 106988)
@@ -17,58 +17,63 @@
 and logging duties.
 
 """
-import time, re,  sys, tempfile
+import asyncore
 from cStringIO import StringIO
+import re
+import tempfile
 import thread
-from ZPublisher.HTTPResponse import HTTPResponse
-from ZPublisher.Iterators import IStreamIterator
-from medusa.http_date import build_http_date
-from PubCore.ZEvent import Wakeup
-from medusa.producers import hooked_producer
-from medusa import http_server
-import asyncore
-from Producers import ShutdownProducer, LoggingProducer, CallbackProducer, \
-    file_part_producer, file_close_producer, iterator_producer
-import DebugLogger
+import time
 
+from ZPublisher.HTTPResponse import HTTPResponse # WTF?
+from ZPublisher.Iterators import IStreamIterator # WTF?
 
+from ZServer.medusa.http_date import build_http_date
+from ZServer.PubCore.ZEvent import Wakeup
+from ZServer.medusa import http_server
+
+from ZServer.Producers import ShutdownProducer
+from ZServer.Producers import LoggingProducer
+from ZServer.Producers import CallbackProducer
+from ZServer.Producers import file_part_producer
+from ZServer.Producers import file_close_producer
+from ZServer.Producers import iterator_producer
+from ZServer.DebugLogger import log
+
+
 class ZServerHTTPResponse(HTTPResponse):
     "Used to push data into a channel's producer fifo"
 
     # Set this value to 1 if streaming output in
     # HTTP/1.1 should use chunked encoding
-    http_chunk=1
-    http_chunk_size=1024
+    http_chunk = 1
+    http_chunk_size = 1024
 
     # defaults
-    _http_version='1.0'
-    _http_connection='close'
-    _server_version='Zope/2.0 ZServer/2.0'
+    _http_version = '1.0'
+    _http_connection = 'close'
+    _server_version = 'Zope/2.0 ZServer/2.0'
 
     # using streaming response
-    _streaming=0
+    _streaming = 0
     # using chunking transfer-encoding
-    _chunking=0
+    _chunking = 0
     _bodyproducer = None
 
-    def __str__(self,
-                html_search=re.compile('<html>',re.I).search,
-                ):
+    def __str__(self):
         if self._wrote:
             if self._chunking:
                 return '0\r\n\r\n'
             else:
                 return ''
 
-        headers=self.headers
-        body=self.body
+        headers = self.headers
+        body = self.body
 
         # set 204 (no content) status if 200 and response is empty
         # and not streaming
-        if not headers.has_key('content-type') and \
-                not headers.has_key('content-length') and \
-                not self._streaming and \
-                self.status == 200:
+        if ('content-type' not in headers and 
+            'content-length' not in headers and 
+            not self._streaming and self.status == 200):
             self.setStatus('nocontent')
 
         if self.status in (100, 101, 102, 204, 304):
@@ -82,65 +87,65 @@
         elif not headers.has_key('content-length') and not self._streaming:
             self.setHeader('content-length', len(body))
 
-        headersl=[]
-        append=headersl.append
+        chunks = []
+        append = chunks.append
 
-        status=headers.get('status', '200 OK')
 
         # status header must come first.
-        append("HTTP/%s %s" % (self._http_version or '1.0' , status))
-        if headers.has_key('status'):
-            del headers['status']
+        append("HTTP/%s %d %s" % (self._http_version or '1.0',
+                                  self.status, self.errmsg))
 
         # add zserver headers
         append('Server: %s' % self._server_version)
         append('Date: %s' % build_http_date(time.time()))
 
-        if self._http_version=='1.0':
-            if self._http_connection=='keep-alive':
+        if self._http_version == '1.0':
+            if self._http_connection == 'keep-alive':
                 self.setHeader('Connection','Keep-Alive')
             else:
                 self.setHeader('Connection','close')
 
         # Close the connection if we have been asked to.
         # Use chunking if streaming output.
-        if self._http_version=='1.1':
-            if self._http_connection=='close':
+        if self._http_version == '1.1':
+            if self._http_connection == 'close':
                 self.setHeader('Connection','close')
             elif (not self.headers.has_key('content-length') and 
                   self.http_chunk and self._streaming):
                 self.setHeader('Transfer-Encoding','chunked')
-                self._chunking=1
+                self._chunking = 1
                     
         headers = headers.items()
-        for line in self.accumulated_headers.splitlines():
+        for line in self.accumulated_headers:
             if line[0] == '\t':
                 headers[-1][1] += '\n' + line
                 continue
             headers.append(line.split(': ', 1))
 
         for key, val in headers:
-            if key.lower()==key:
+            if key.lower() == key:
                 # only change non-literal header names
-                key="%s%s" % (key[:1].upper(), key[1:])
-                start=0
-                l=key.find('-',start)
+                key = "%s%s" % (key[:1].upper(), key[1:])
+                start = 0
+                l = key.find('-',start)
                 while l >= start:
-                    key="%s-%s%s" % (key[:l],key[l+1:l+2].upper(),key[l+2:])
-                    start=l+1
-                    l=key.find('-',start)
+                    key = "%s-%s%s" % (key[:l],
+                                       key[l+1:l+2].upper(),
+                                       key[l+2:])
+                    start = l + 1
+                    l = key.find('-', start)
                 val = val.replace('\n\t', '\r\n\t')
             append("%s: %s" % (key, val))
         if self.cookies:
-            headersl.extend(self._cookie_list())
+            chunks.extend(self._cookie_list())
             
         append('')
         append(body)
-        return "\r\n".join(headersl)
+        return "\r\n".join(chunks)
 
-    _tempfile=None
-    _templock=None
-    _tempstart=0
+    _tempfile = None
+    _templock = None
+    _tempstart = 0
 
     def write(self,data):
         """\
@@ -162,42 +167,42 @@
         if type(data) != type(''):
             raise TypeError('Value must be a string')
 
-        stdout=self.stdout
+        stdout = self.stdout
 
         if not self._wrote:
-            l=self.headers.get('content-length', None)
+            l = self.headers.get('content-length', None)
             if l is not None:
                 try:
-                    if type(l) is type(''): l=int(l)
+                    if type(l) is type(''): l = int(l)
                     if l > 128000:
-                        self._tempfile=tempfile.TemporaryFile()
-                        self._templock=thread.allocate_lock()
+                        self._tempfile = tempfile.TemporaryFile()
+                        self._templock = thread.allocate_lock()
                 except: pass
 
-            self._streaming=1
+            self._streaming = 1
             stdout.write(str(self))
-            self._wrote=1
+            self._wrote = 1
 
         if not data: return
 
         if self._chunking:
             data = '%x\r\n%s\r\n' % (len(data),data)
 
-        l=len(data)
+        l = len(data)
 
-        t=self._tempfile
+        t = self._tempfile
         if t is None or l<200:
             stdout.write(data)
         else:
-            b=self._tempstart
-            e=b+l
+            b = self._tempstart
+            e = b + l
             self._templock.acquire()
             try:
                 t.seek(b)
                 t.write(data)
             finally:
                 self._templock.release()
-            self._tempstart=e
+            self._tempstart = e
             stdout.write(file_part_producer(t,self._templock,b,e), l)
 
     _retried_response = None
@@ -209,18 +214,18 @@
             finally:
                 self._retried_response = None
             return
-        stdout=self.stdout
+        stdout = self.stdout
 
-        t=self._tempfile
+        t = self._tempfile
         if t is not None:
             stdout.write(file_close_producer(t), 0)
-            self._tempfile=None
+            self._tempfile = None
 
         stdout.finish(self)
         stdout.close()
 
-        self.stdout=None # need to break cycle?
-        self._request=None
+        self.stdout = None # need to break cycle?
+        self._request = None
 
     def retry(self):
         """Return a request object to be used in a retry attempt
@@ -229,11 +234,11 @@
         # only stdout stderr were passed to the constructor. OTOH, I
         # think that that's all that is ever passed.
 
-        response=self.__class__(stdout=self.stdout, stderr=self.stderr)
-        response.headers=self.headers
-        response._http_version=self._http_version
-        response._http_connection=self._http_connection
-        response._server_version=self._server_version
+        response = self.__class__(stdout=self.stdout, stderr=self.stderr)
+        response.headers = self.headers
+        response._http_version = self._http_version
+        response._http_connection = self._http_connection
+        response._server_version = self._server_version
         self._retried_response = response
         return response
 
@@ -267,28 +272,28 @@
     restrict access to channel to the push method only."""
 
     def __init__(self, request):
-        self._channel=request.channel
-        self._request=request
-        self._shutdown=0
-        self._close=0
-        self._bytes=0
+        self._channel = request.channel
+        self._request = request
+        self._shutdown = 0
+        self._close = 0
+        self._bytes = 0
 
     def write(self, text, l=None):
         if self._channel.closed:
             return
-        if l is None: l=len(text)
-        self._bytes=self._bytes + l
+        if l is None: l = len(text)
+        self._bytes = self._bytes + l
         self._channel.push(text,0)
         Wakeup()
 
     def close(self):
-        DebugLogger.log('A', id(self._request),
+        log('A', id(self._request),
                 '%s %s' % (self._request.reply_code, self._bytes))
         if not self._channel.closed:
             self._channel.push(LoggingProducer(self._request, self._bytes), 0)
             self._channel.push(CallbackProducer(self._channel.done), 0)
             self._channel.push(CallbackProducer(
-                lambda t=('E', id(self._request)): apply(DebugLogger.log, t)), 0)
+                lambda t=('E', id(self._request)): apply(log, t)), 0)
             if self._shutdown:
                 self._channel.push(ShutdownProducer(), 0)
                 Wakeup()
@@ -299,15 +304,15 @@
             # channel closed too soon
 
             self._request.log(self._bytes)
-            DebugLogger.log('E', id(self._request))
+            log('E', id(self._request))
 
             if self._shutdown:
                 Wakeup(lambda: asyncore.close_all())
             else:
                 Wakeup()
 
-        self._channel=None #need to break cycles?
-        self._request=None
+        self._channel = None #need to break cycles?
+        self._request = None
 
     def flush(self): pass # yeah, whatever
 
@@ -316,8 +321,8 @@
             self._shutdown = 1
         if response.headers.get('connection','') == 'close' or \
                 response.headers.get('Connection','') == 'close':
-            self._close=1
-        self._request.reply_code=response.status
+            self._close = 1
+        self._request.reply_code = response.status
 
     def start_response(self, status, headers, exc_info=None):
         # Used for WSGI
@@ -337,9 +342,10 @@
     "Simple http response factory"
     # should this be integrated into the HTTPResponse constructor?
 
-    response=ZServerHTTPResponse(stdout=ChannelPipe(request), stderr=StringIO())
-    response._http_version=request.version
-    if request.version=='1.0' and is_proxying_match(request.request):
+    response = ZServerHTTPResponse(stdout=ChannelPipe(request),
+                                   stderr=StringIO())
+    response._http_version = request.version
+    if request.version == '1.0' and is_proxying_match(request.request):
         # a request that was made as if this zope was an http 1.0 proxy.
         # that means we have to use some slightly different http
         # headers to manage persistent connections.
@@ -349,5 +355,5 @@
         connection_re = http_server.CONNECTION
     response._http_connection = http_server.get_header(connection_re,
                                                        request.header).lower()
-    response._server_version=request.channel.server.SERVER_IDENT
+    response._server_version = request.channel.server.SERVER_IDENT
     return response

Modified: Zope/branches/tseaver-fix_wsgi/src/ZServer/tests/test_responses.py
===================================================================
--- Zope/branches/tseaver-fix_wsgi/src/ZServer/tests/test_responses.py	2009-12-23 01:52:01 UTC (rev 106987)
+++ Zope/branches/tseaver-fix_wsgi/src/ZServer/tests/test_responses.py	2009-12-23 02:14:35 UTC (rev 106988)
@@ -107,7 +107,9 @@
             'Title-Cased': 'bar',
             'mixed-CasED': 'spam',
             'multilined': 'eggs\n\tham'}
-        response.accumulated_headers = 'foo-bar: bar\n\tbaz\nFoo-bar: monty\n'
+        response.accumulated_headers = ['foo-bar: bar',
+                                        '\tbaz',
+                                        'Foo-bar: monty']
         response.cookies = dict(foo=dict(value='bar'))
         response.body = 'A body\nwith multiple lines\n'
         



More information about the Zope-Checkins mailing list