add try/except around proxy request handling
author lkcl <lkcl@teenymac.(none)>
Wed, 14 Jul 2010 21:50:51 +0000 (22:50 +0100)
committer lkcl <lkcl@teenymac.(none)>
Wed, 14 Jul 2010 21:50:51 +0000 (22:50 +0100)
ProxyServer.py

index dd10f75e1a6c8519b3968b1cc99eb541f06af07c..15380f6a0d3131da968b9bd7b55dd827ffdbce66 100644
@@ -157,130 +157,137 @@ class ProxyServerRequestHandler(object):
         #    (yield multitask.sleep(0.01))
         p.serving = True
 
-        # send command
-        req = "%s %s %s\n" % (reqtype, self.hr.path, "HTTP/1.1")
-        print "req", req
-        yield p.ss.write(req)
-
-        conntype = self.hr.headers.get('Connection', "")
-        keepalive = conntype.lower() == 'keep-alive'
-
-        self.hr.headers['Connection'] = 'keep-alive'
-        self.hr.close_connection = 0
-
-        # send headers
-        hdrs = str(self.hr.headers)
-        print "hdrs", hdrs
-        yield p.ss.write(hdrs)
-        yield p.ss.write('\r\n')
-
-        # now content
-        if self.hr.headers.has_key('content-length'):
-            max_chunk_size = 10*1024*1024
-            size_remaining = int(self.hr.headers["content-length"])
-            L = []
-            print "size_remaining", size_remaining
-            while size_remaining:
-                chunk_size = min(size_remaining, max_chunk_size)
-                data = self.hr.rfile.read(chunk_size)
-                print "proxy rfile read", repr(data)
-                yield multitask.send(p.sock, data)
-                size_remaining -= len(data)
-
-        # now read response and write back
-        # HTTP/1.0 200 OK status line etc.
-        responseline = (yield p.ss.readline())
-        yield self.client.writeMessage(responseline)
-
-        res = ''
         try:
-            while 1:
-                line = (yield p.ss.readline())
-                print "reading from proxy", repr(line)
-                res += line
-                if line in ['\n', '\r\n']:
-                    break
-        except StopIteration:
-            if httpd._debug: print "proxy read stopiter"
-            # TODO: close connection
-        except:
-            if httpd._debug:
-                print 'proxy read error', \
-                      (traceback and traceback.print_exc() or None)
-            # TODO: close connection
-
-        f = StringIO(res)
-
-        # Examine the headers and look for a Connection directive
-        respheaders = mimetools.Message(f, 0)
-        print "response headers", str(respheaders)
-        remote = self.client.remote
-        rcooks = httpd.process_cookies(respheaders, remote, "Set-Cookie", False)
-        rcooks['session'] = self.hr.response_cookies['session'].value # nooo
-        rcooks['session']['expires'] = \
-                self.hr.response_cookies['session']['expires']
-        self.hr.response_cookies = rcooks
-        print "rcooks", str(rcooks)
-
-        # override connection: keep-alive hack
-        #responseline = responseline.split(" ")
-        #print "responseline:", responseline
-        #if responseline[1] != "200":
-        #    respheaders['Connection'] = 'close'
-
-        # send all but Set-Cookie headers
-        del respheaders['Set-Cookie'] # being replaced
-        yield self.client.writeMessage(str(respheaders))
-
-        # now replacement cookies
-        for k, v in rcooks.items():
-            val = v.output()
-            yield self.client.writeMessage(val+"\r\n")
-
-        # check connection for "closed" header
-        if keepalive:
-            conntype = respheaders.get('Connection', "")
-            if conntype.lower() == 'close':
-                self.hr.close_connection = 1
-            elif (conntype.lower() == 'keep-alive' and
-                  self.hr.protocol_version >= "HTTP/1.1"):
-                self.hr.close_connection = 0
-
-        # write rest of data
-        print "writing to client body"
-        yield self.client.writeMessage("\r\n")
-
-        if respheaders.has_key('content-length'):
-            max_chunk_size = 10*1024*1024
-            size_remaining = int(respheaders["content-length"])
-            while size_remaining:
-                chunk_size = min(size_remaining, max_chunk_size)
-                data = (yield p.ss.read(chunk_size))
-                print "reading from proxy expecting", size_remaining, repr(data)
-                yield self.client.writeMessage(data)
-                size_remaining -= len(data)
-        else:
-            while True:
-                #data = p.read()
+            # send command
+            req = "%s %s %s\n" % (reqtype, self.hr.path, "HTTP/1.1")
+            print "req", req
+            yield p.ss.write(req)
+
+            conntype = self.hr.headers.get('Connection', "")
+            keepalive = conntype.lower() == 'keep-alive'
+
+            self.hr.headers['Connection'] = 'keep-alive'
+            self.hr.close_connection = 0
+
+            # send headers
+            hdrs = str(self.hr.headers)
+            print "hdrs", hdrs
+            yield p.ss.write(hdrs)
+            yield p.ss.write('\r\n')
+
+            # now content
+            if self.hr.headers.has_key('content-length'):
+                max_chunk_size = 10*1024*1024
+                size_remaining = int(self.hr.headers["content-length"])
+                L = []
+                print "size_remaining", size_remaining
+                while size_remaining:
+                    chunk_size = min(size_remaining, max_chunk_size)
+                    data = self.hr.rfile.read(chunk_size)
+                    print "proxy rfile read", repr(data)
+                    yield multitask.send(p.sock, data)
+                    size_remaining -= len(data)
+
+            # now read response and write back
+            # HTTP/1.0 200 OK status line etc.
+            responseline = (yield p.ss.readline())
+            yield self.client.writeMessage(responseline)
+
+            res = ''
+            try:
+                while 1:
+                    line = (yield p.ss.readline())
+                    print "reading from proxy", repr(line)
+                    res += line
+                    if line in ['\n', '\r\n']:
+                        break
+            except StopIteration:
+                if httpd._debug: print "proxy read stopiter"
+                # TODO: close connection
+            except:
+                if httpd._debug:
+                    print 'proxy read error', \
+                          (traceback and traceback.print_exc() or None)
+                # TODO: close connection
+
+            f = StringIO(res)
+
+            # Examine the headers and look for a Connection directive
+            respheaders = mimetools.Message(f, 0)
+            print "response headers", str(respheaders)
+            remote = self.client.remote
+            rcooks = httpd.process_cookies(respheaders, remote, "Set-Cookie", False)
+            rcooks['session'] = self.hr.response_cookies['session'].value # nooo
+            rcooks['session']['expires'] = \
+                    self.hr.response_cookies['session']['expires']
+            self.hr.response_cookies = rcooks
+            print "rcooks", str(rcooks)
+
+            # override connection: keep-alive hack
+            #responseline = responseline.split(" ")
+            #print "responseline:", responseline
+            #if responseline[1] != "200":
+            #    respheaders['Connection'] = 'close'
+
+            # send all but Set-Cookie headers
+            del respheaders['Set-Cookie'] # being replaced
+            yield self.client.writeMessage(str(respheaders))
+
+            # now replacement cookies
+            for k, v in rcooks.items():
+                val = v.output()
+                yield self.client.writeMessage(val+"\r\n")
+
+            # check connection for "closed" header
+            if keepalive:
+                conntype = respheaders.get('Connection', "")
+                if conntype.lower() == 'close':
+                    self.hr.close_connection = 1
+                elif (conntype.lower() == 'keep-alive' and
+                      self.hr.protocol_version >= "HTTP/1.1"):
+                    self.hr.close_connection = 0
+
+            # write rest of data
+            print "writing to client body"
+            yield self.client.writeMessage("\r\n")
+
+            if respheaders.has_key('content-length'):
+                max_chunk_size = 10*1024*1024
+                size_remaining = int(respheaders["content-length"])
+                while size_remaining:
+                    chunk_size = min(size_remaining, max_chunk_size)
+                    data = (yield p.ss.read(chunk_size))
+                    print "reading from proxy expecting", size_remaining, repr(data)
+                    yield self.client.writeMessage(data)
+                    size_remaining -= len(data)
+            else:
+                while True:
+                    #data = p.read()
+                    try:
+                        data = (yield p.ss.read(1024))
+                    except httpd.ConnectionClosed:
+                        break
+                    print "reading from proxy", repr(data)
+                    if data == '':
+                        break
+                    yield self.client.writeMessage(data)
+
+            if not keepalive: #self.hr.close_connection:
+                print 'proxy wants client to close_connection'
                 try:
-                    data = (yield p.ss.read(1024))
+                    yield self.client.connectionClosed()
+                    raise httpd.ConnectionClosed
                 except httpd.ConnectionClosed:
-                    break
-                print "reading from proxy", repr(data)
-                if data == '':
-                    break
-                yield self.client.writeMessage(data)
-
-        if not keepalive: #self.hr.close_connection:
-            print 'proxy wants client to close_connection'
-            try:
-                yield self.client.connectionClosed()
-                raise httpd.ConnectionClosed
-            except httpd.ConnectionClosed:
-                print 'close_connection done'
-                pass
+                    print 'close_connection done'
+                    pass
 
-        p.serving = False
+            p.serving = False
+        except httpd.ConnectionClosed:
+            # whoops...
+            raise httpd.ConnectionClosed
+        except:
+            traceback.print_exc()
+
 
         raise StopIteration
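
The net effect of the change: the whole request/response relay now runs inside a single try/except, so a dropped connection (httpd.ConnectionClosed) still propagates to the caller while any other failure is logged instead of silently killing the handler coroutine. Below is a minimal sketch of that pattern for a multitask-style generator handler; forward_request and relay_response are hypothetical helpers standing in for the inline code in the diff above, not functions from this module.

    def handle_proxy(self, p, reqtype):
        # Sketch only: p.serving, httpd.ConnectionClosed and traceback come
        # from the surrounding module; the two helpers below are hypothetical
        # stand-ins for the inline request/response code shown in the diff.
        p.serving = True
        try:
            yield self.forward_request(p, reqtype)  # send request line, headers, body
            yield self.relay_response(p)            # stream the upstream reply back
            p.serving = False
        except httpd.ConnectionClosed:
            # client or upstream went away: hand the closed connection back up
            raise
        except Exception:
            # the commit itself uses a bare except:; either way, log the error
            # rather than letting the coroutine die silently
            traceback.print_exc()
        raise StopIteration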