import warnings
import unittest
import sys
import urllib2
import socket

from openid import fetchers

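# These tests exercise the HTTP fetcher implementations in openid.fetchers
# against a small local HTTP server, and check the behaviour of the
# module-level default fetcher.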

def failUnlessResponseExpected(expected, actual):
    # Compare the interesting fields of the expected HTTPResponse with the
    # response the fetcher actually returned.  Only the headers listed in
    # the expected response are checked, because the test server adds
    # headers of its own (e.g. Date and Server).
    assert expected.final_url == actual.final_url, (
        '%r != %r' % (expected.final_url, actual.final_url))
    assert expected.status == actual.status
    assert expected.body == actual.body
    got_headers = dict(actual.headers)
    for k, v in expected.headers.iteritems():
        assert got_headers.get(k) == v, (k, v, got_headers.get(k))

def test_fetcher(fetcher, exc, server):
    def geturl(path):
        return 'http://%s:%s%s' % (socket.getfqdn(server.server_name),
                                   server.socket.getsockname()[1],
                                   path)

    expected_headers = {'content-type': 'text/plain'}

    def plain(path, code):
        path = '/' + path
        expected = fetchers.HTTPResponse(
            geturl(path), code, expected_headers, path)
        return (path, expected)

    expect_success = fetchers.HTTPResponse(
        geturl('/success'), 200, expected_headers, '/success')
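    # Every redirect path on the test server points at /success, so a
    # fetcher that follows redirects should end up with the same response
    # for each of them.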
    cases = [
        ('/success', expect_success),
        ('/301redirect', expect_success),
        ('/302redirect', expect_success),
        ('/303redirect', expect_success),
        ('/307redirect', expect_success),
        plain('notfound', 404),
        plain('badreq', 400),
        plain('forbidden', 403),
        plain('error', 500),
        plain('server_error', 503),
        ]

    for path, expected in cases:
        fetch_url = geturl(path)
        try:
            actual = fetcher.fetch(fetch_url)
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            print fetcher, fetch_url
            raise
        else:
            failUnlessResponseExpected(expected, actual)

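    # URLs that every fetcher should fail on: a connection the server
    # closes without responding, an unresolvable host, and two non-HTTP
    # URLs.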
    for err_url in [geturl('/closed'),
                    'http://invalid.janrain.com/',
                    'not:a/url',
                    'ftp://janrain.com/pub/']:
        try:
            result = fetcher.fetch(err_url)
        except (KeyboardInterrupt, SystemExit):
            raise
        except fetchers.HTTPError, why:
            # A raw HTTPError is only acceptable from fetchers that are
            # expected to raise exceptions.
            assert exc
        except fetchers.HTTPFetchingError, why:
            # The wrapped exception type should only come from the
            # exception-wrapping fetchers.
            assert not exc, (fetcher, exc, server)
        except:
            # Any other exception is fine as long as this fetcher is
            # expected to raise exceptions.
            assert exc
        else:
            assert False, 'An exception was expected for %r (%r)' % (
                fetcher, result)

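# Construct one instance of every fetcher whose underlying HTTP library is
# importable; fetchers whose library is missing are skipped with a warning.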
def run_fetcher_tests(server):
    exc_fetchers = []
    for klass, library_name in [
        (fetchers.Urllib2Fetcher, 'urllib2'),
        (fetchers.CurlHTTPFetcher, 'pycurl'),
        (fetchers.HTTPLib2Fetcher, 'httplib2'),
        ]:
        try:
            exc_fetchers.append(klass())
        except RuntimeError, why:
            message = why.args[0]
            if message.startswith('Cannot find %s library' % (library_name,)):
                try:
                    __import__(library_name)
                except ImportError:
                    warnings.warn(
                        'Skipping tests for %r fetcher because '
                        'the library did not import.' % (library_name,))
                else:
                    assert False, (
                        '%s present but not detected' % (library_name,))
            else:
                raise

    non_exc_fetchers = []
    for f in exc_fetchers:
        non_exc_fetchers.append(fetchers.ExceptionWrappingFetcher(f))

    for f in exc_fetchers:
        test_fetcher(f, True, server)

    for f in non_exc_fetchers:
        test_fetcher(f, False, server)

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

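# A minimal request handler for the test server: each known path responds
# with a fixed status code and, for the 3xx paths, a Location header that
# points back at /success.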
class FetcherTestHandler(BaseHTTPRequestHandler):
    # Map of request path -> (status code, redirect target or None).
    cases = {
        '/success': (200, None),
        '/301redirect': (301, '/success'),
        '/302redirect': (302, '/success'),
        '/303redirect': (303, '/success'),
        '/307redirect': (307, '/success'),
        '/notfound': (404, None),
        '/badreq': (400, None),
        '/forbidden': (403, None),
        '/error': (500, None),
        '/server_error': (503, None),
        }
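    # '/closed' is deliberately absent from the table above: do_GET() closes
    # the connection for it without sending any response.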

    def do_GET(self):
        if self.path == '/closed':
            self.wfile.close()
        else:
            try:
                http_code, location = self.cases[self.path]
            except KeyError:
                self.errorResponse('Bad path')
            else:
                extra_headers = [('Content-type', 'text/plain')]
                if location is not None:
                    host, port = self.server.server_address
                    base = 'http://%s:%s' % (socket.getfqdn(host), port)
                    location = base + location
                    extra_headers.append(('Location', location))
                self._respond(http_code, extra_headers, self.path)

    def do_POST(self):
        try:
            http_code, location = self.cases[self.path]
        except KeyError:
            self.errorResponse('Bad path')
        else:
            if http_code in [301, 302, 303, 307]:
                # This test server does not redirect POSTs.
                self.errorResponse()
            else:
                content_type = self.headers.get('content-type', 'text/plain')
                extra_headers = [('Content-type', content_type)]
                content_length = int(self.headers.get('Content-length', '-1'))
                body = self.rfile.read(content_length)
                self._respond(http_code, extra_headers, body)

    def errorResponse(self, message=None):
        # Reply with a 400 and a plain-text description of the bad request.
        req = [
            ('HTTP method', self.command),
            ('path', self.path),
            ]
        if message:
            req.append(('message', message))

        body_parts = ['Bad request:\r\n']
        for k, v in req:
            body_parts.append(' %s: %s\r\n' % (k, v))
        body = ''.join(body_parts)
        self._respond(400, [('Content-type', 'text/plain')], body)

    def _respond(self, http_code, extra_headers, body):
        self.send_response(http_code)
        for k, v in extra_headers:
            self.send_header(k, v)
        self.end_headers()
        self.wfile.write(body)
        self.wfile.close()

    def finish(self):
        # _respond() may already have closed wfile, so guard the flush
        # before closing both streams.
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()

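# Start a throwaway HTTP server in a daemon thread and run every available
# fetcher implementation against it.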
def test():
    host = socket.getfqdn('127.0.0.1')
    # Derive the port from the process ID instead of asking the OS for an
    # arbitrary free port, so the port is stable for this process and
    # concurrent test runs are unlikely to collide.
    import os
    port = (os.getpid() % 31000) + 1024

    server = HTTPServer((host, port), FetcherTestHandler)

    import threading
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.setDaemon(True)
    server_thread.start()

    run_fetcher_tests(server)

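# Tests for the module-level default fetcher (setDefaultFetcher,
# getDefaultFetcher, and fetchers.fetch).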
class DefaultFetcherTest(unittest.TestCase):
    def setUp(self):
        # Start each test with no default fetcher installed.
        fetchers.setDefaultFetcher(None)

    def tearDown(self):
        # Do not leak the fetcher installed by a test into other tests.
        fetchers.setDefaultFetcher(None)

    def test_notWrapped(self):
        """Make sure that if we set a non-wrapped fetcher as default,
        it will not wrap exceptions."""
        # Install a fetcher that raises raw urllib2/socket exceptions.
        fetcher = fetchers.Urllib2Fetcher()
        fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)

        self.failIf(isinstance(fetchers.getDefaultFetcher(),
                               fetchers.ExceptionWrappingFetcher))

        try:
            fetchers.fetch('http://invalid.janrain.com/')
        except fetchers.HTTPFetchingError:
            self.fail('Should not be wrapping exception')
        except:
            exc = sys.exc_info()[1]
            self.failUnless(isinstance(exc, urllib2.URLError), exc)
        else:
            self.fail('Should have raised an exception')