Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1428709 - Add six for Python 3 compat in mozhttpd; r=davehunt
MozReview-Commit-ID: 1fNdmG9YVQq

--HG--
extra : rebase_source : 560e433640947384baf93d53ffaedad3a64ea2c4
Parent: 0ea361ac9f
Commit: 6650158314
@@ -6,8 +6,7 @@
 from __future__ import absolute_import, print_function
 
-import BaseHTTPServer
-import SimpleHTTPServer
 import errno
 import logging
 import threading
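Note (illustration, not part of the patch): the two deleted imports are Python 2-only module names; Python 3 renamed BaseHTTPServer to http.server, which is what breaks. A minimal sketch of the failure and the six fallback, assuming six is installed:

# The Python 2 import fails on Python 3; six.moves exposes the module
# under one name on both interpreters.
try:
    import BaseHTTPServer  # Python 2 only
except ImportError:
    from six.moves import BaseHTTPServer
print(BaseHTTPServer.HTTPServer)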
@@ -15,15 +14,22 @@ import posixpath
 import socket
 import sys
 import os
-import urllib
-import urlparse
 import re
 import moznetwork
 import time
-from SocketServer import ThreadingMixIn
+
+from six import iteritems
+from six.moves.socketserver import ThreadingMixIn
+from six.moves.BaseHTTPServer import HTTPServer
+
+from six.moves.urllib.parse import (
+    urlsplit,
+    unquote,
+)
+from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
 
 
-class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer):
+class EasyServer(ThreadingMixIn, HTTPServer):
     allow_reuse_address = True
     acceptable_errors = (errno.EPIPE, errno.ECONNABORTED)
 
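Note (illustration, not part of the patch): each six.moves alias resolves at import time to whichever stdlib module the running interpreter actually has, so the mixin pattern used by EasyServer works unchanged on both versions. A minimal sketch, assuming six is installed; DemoServer is a hypothetical name:

from six import PY2
from six.moves.BaseHTTPServer import HTTPServer
from six.moves.socketserver import ThreadingMixIn


class DemoServer(ThreadingMixIn, HTTPServer):
    # same mixin pattern as EasyServer above
    allow_reuse_address = True


# BaseHTTPServer on Python 2, http.server on Python 3
print('PY2' if PY2 else 'PY3', HTTPServer.__module__)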
@@ -50,7 +56,7 @@ class Request(object):
     def __init__(self, uri, headers, rfile=None):
         self.uri = uri
         self.headers = headers
-        parsed = urlparse.urlsplit(uri)
+        parsed = urlsplit(uri)
         for i, attr in enumerate(self.uri_attrs):
             setattr(self, attr, parsed[i])
         try:
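Note (illustration, not part of the patch): urlsplit returns the same 5-tuple on both Python versions, which is why Request can keep unpacking the result positionally into its uri_attrs. A minimal sketch with a hypothetical URL, assuming six is installed:

from six.moves.urllib.parse import urlsplit

scheme, netloc, path, query, fragment = urlsplit(
    'http://example.com/api/resource/1?foo=bar')
print(netloc, path, query)  # example.com /api/resource/1 foo=bar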
@@ -63,7 +69,7 @@ class Request(object):
             self.body = None
 
 
-class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+class RequestHandler(SimpleHTTPRequestHandler):
 
     docroot = os.getcwd()  # current working directory at time of import
     proxy_host_dirs = False
@@ -72,7 +78,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
     request = None
 
     def __init__(self, *args, **kwargs):
-        SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
+        SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
         self.extensions_map['.svg'] = 'image/svg+xml'
 
     def _try_handler(self, method):
@@ -89,7 +95,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
                 (response_code, headerdict, data) = \
                     handler['function'](self.request, *m.groups())
                 self.send_response(response_code)
-                for (keyword, value) in headerdict.iteritems():
+                for (keyword, value) in iteritems(headerdict):
                     self.send_header(keyword, value)
                 self.end_headers()
                 self.wfile.write(data)
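Note (illustration, not part of the patch): six.iteritems(d) calls d.iteritems() on Python 2 and d.items() on Python 3, so the header loop stays lazy on 2 and valid on 3. A minimal sketch with hypothetical header values, assuming six is installed:

from six import iteritems

headerdict = {'Content-Type': 'application/json', 'Content-Length': '2'}
for keyword, value in iteritems(headerdict):
    print(keyword, value)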
@@ -103,7 +109,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         using self.path_mappings and self.docroot.
         Return (url_path, disk_path)."""
         path_components = filter(None, self.request.path.split('/'))
-        for prefix, disk_path in self.path_mappings.iteritems():
+        for prefix, disk_path in iteritems(self.path_mappings):
             prefix_components = filter(None, prefix.split('/'))
             if len(path_components) < len(prefix_components):
                 continue
@@ -115,7 +121,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         return None
 
     def parse_request(self):
-        retval = SimpleHTTPServer.SimpleHTTPRequestHandler.parse_request(self)
+        retval = SimpleHTTPRequestHandler.parse_request(self)
         self.request = Request(self.path, self.headers, self.rfile)
         return retval
 
@@ -129,7 +135,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
             if self.request.netloc and self.proxy_host_dirs:
                 self.path = '/' + self.request.netloc + \
                     self.path
-            SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
+            SimpleHTTPRequestHandler.do_GET(self)
         else:
             self.send_response(404)
             self.end_headers()
@@ -158,7 +164,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         # except we serve from self.docroot instead of os.getcwd(), and
         # parse_request()/do_GET() have already stripped the query string and
         # fragment and mangled the path for proxying, if required.
-        path = posixpath.normpath(urllib.unquote(self.path))
+        path = posixpath.normpath(unquote(self.path))
         words = path.split('/')
         words = filter(None, words)
         path = self.disk_root
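Note (illustration, not part of the patch): unquote lived in urllib on Python 2 and moved to urllib.parse on Python 3; the six.moves alias papers over the rename while the path handling stays the same. A minimal sketch with a hypothetical path, assuming six is installed:

import posixpath

from six.moves.urllib.parse import unquote

print(posixpath.normpath(unquote('/static/my%20file.txt')))  # /static/my file.txt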
@@ -7,7 +7,7 @@ from __future__ import absolute_import
 from setuptools import setup
 
 PACKAGE_VERSION = '0.7'
-deps = ['moznetwork >= 0.24']
+deps = ['moznetwork >= 0.24', 'mozinfo >= 1.0.0', 'six >= 1.10.0']
 
 setup(name='mozhttpd',
       version=PACKAGE_VERSION,
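Note (illustration, not part of the patch): the packaging hunk above declares six (and mozinfo) as install requirements. A quick sketch for checking that the pinned dependencies resolve in the current environment, assuming setuptools' pkg_resources is available:

import pkg_resources

for dep in ['moznetwork >= 0.24', 'mozinfo >= 1.0.0', 'six >= 1.10.0']:
    pkg_resources.require(dep)  # raises if the package is missing or too old
    print(dep, 'ok')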
@@ -8,7 +8,6 @@ from __future__ import absolute_import
 
 import mozfile
 import mozhttpd
-import urllib2
 import os
 import unittest
 import json
@@ -16,6 +15,16 @@ import tempfile
 
 import mozunit
 
+from six.moves.urllib.request import (
+    HTTPHandler,
+    ProxyHandler,
+    Request,
+    build_opener,
+    install_opener,
+    urlopen,
+)
+from six.moves.urllib.error import HTTPError
+
 here = os.path.dirname(os.path.abspath(__file__))
 
 
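Note (illustration, not part of the patch): everything the tests previously took from urllib2 lives in urllib.request and urllib.error on Python 3; six.moves gives one import path for both. A minimal sketch using a deliberately unreachable local port (the address is hypothetical), assuming six is installed:

from six.moves.urllib.error import HTTPError
from six.moves.urllib.request import Request, urlopen

try:
    urlopen(Request('http://127.0.0.1:1/'))  # nothing listens here; fails fast
except HTTPError as e:
    print('HTTP error:', e.code)
except Exception as e:  # URLError / socket error for the unreachable port
    print('expected failure:', type(e).__name__)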
@@ -54,7 +63,7 @@ class ApiTest(unittest.TestCase):
     def try_get(self, server_port, querystr):
         self.resource_get_called = 0
 
-        f = urllib2.urlopen(self.get_url('/api/resource/1', server_port, querystr))
+        f = urlopen(self.get_url('/api/resource/1', server_port, querystr))
        try:
             self.assertEqual(f.getcode(), 200)
         except AttributeError:
@@ -67,9 +76,11 @@ class ApiTest(unittest.TestCase):
 
         postdata = {'hamburgers': '1234'}
         try:
-            f = urllib2.urlopen(self.get_url('/api/resource/', server_port, querystr),
-                                data=json.dumps(postdata))
-        except urllib2.HTTPError as e:
+            f = urlopen(
+                self.get_url('/api/resource/', server_port, querystr),
+                data=json.dumps(postdata),
+            )
+        except HTTPError as e:
             # python 2.4
             self.assertEqual(e.code, 201)
             body = e.fp.read()
@@ -84,8 +95,8 @@ class ApiTest(unittest.TestCase):
     def try_del(self, server_port, querystr):
         self.resource_del_called = 0
 
-        opener = urllib2.build_opener(urllib2.HTTPHandler)
-        request = urllib2.Request(self.get_url('/api/resource/1', server_port, querystr))
+        opener = build_opener(HTTPHandler)
+        request = Request(self.get_url('/api/resource/1', server_port, querystr))
         request.get_method = lambda: 'DEL'
         f = opener.open(request)
 
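Note (illustration, not part of the patch): urllib only issues GET and POST on its own, so the tests force their nonstandard DEL verb by overriding get_method; this works identically through six.moves. A minimal sketch with a hypothetical URL, assuming six is installed; no request is actually sent:

from six.moves.urllib.request import HTTPHandler, Request, build_opener

request = Request('http://127.0.0.1:8080/api/resource/1')  # hypothetical URL
request.get_method = lambda: 'DEL'  # force the nonstandard verb
opener = build_opener(HTTPHandler)
# opener.open(request) would now send "DEL /api/resource/1 HTTP/1.1"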
@@ -127,8 +138,8 @@ class ApiTest(unittest.TestCase):
         # GET: By default we don't serve any files if we just define an API
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/', server_port, None))
-        except urllib2.HTTPError as e:
+            urlopen(self.get_url('/', server_port, None))
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
@@ -143,8 +154,8 @@ class ApiTest(unittest.TestCase):
         # GET: Return 404 for non-existent endpoint
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/api/resource/', server_port, None))
-        except urllib2.HTTPError as e:
+            urlopen(self.get_url('/api/resource/', server_port, None))
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
@@ -152,9 +163,11 @@ class ApiTest(unittest.TestCase):
         # POST: POST should also return 404
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/api/resource/', server_port, None),
-                            data=json.dumps({}))
-        except urllib2.HTTPError as e:
+            urlopen(
+                self.get_url('/api/resource/', server_port, None),
+                data=json.dumps({}),
+            )
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
@@ -162,12 +175,11 @@ class ApiTest(unittest.TestCase):
         # DEL: DEL should also return 404
         exception_thrown = False
         try:
-            opener = urllib2.build_opener(urllib2.HTTPHandler)
-            request = urllib2.Request(self.get_url('/api/resource/', server_port,
-                                                   None))
+            opener = build_opener(HTTPHandler)
+            request = Request(self.get_url('/api/resource/', server_port, None))
             request.get_method = lambda: 'DEL'
             opener.open(request)
-        except urllib2.HTTPError:
+        except HTTPError:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
@@ -181,7 +193,7 @@ class ApiTest(unittest.TestCase):
         server_port = httpd.httpd.server_port
 
         # We defined a docroot, so we expect a directory listing
-        f = urllib2.urlopen(self.get_url('/', server_port, None))
+        f = urlopen(self.get_url('/', server_port, None))
         try:
             self.assertEqual(f.getcode(), 200)
         except AttributeError:
@@ -197,7 +209,7 @@ class ProxyTest(unittest.TestCase):
 
     def tearDown(self):
         # reset proxy opener in case it changed
-        urllib2.install_opener(None)
+        install_opener(None)
 
     def test_proxy(self):
         docroot = tempfile.mkdtemp()
@@ -211,7 +223,7 @@ class ProxyTest(unittest.TestCase):
 
         def index_contents(host): return '%s index' % host
 
-        index = file(os.path.join(docroot, index_filename), 'w')
+        index = open(os.path.join(docroot, index_filename), 'w')
         index.write(index_contents('*'))
         index.close()
 
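Note (illustration, not part of the patch): Python 3 removed the file() builtin, and open() is a drop-in replacement that behaves the same on Python 2. A minimal sketch of the pattern above; the filename is hypothetical:

import os
import tempfile

docroot = tempfile.mkdtemp()
index = open(os.path.join(docroot, 'index.html'), 'w')  # file() is gone in Python 3
index.write('* index')
index.close()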
@@ -219,12 +231,13 @@ class ProxyTest(unittest.TestCase):
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
-        proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
-                                              server_port})
-        urllib2.install_opener(urllib2.build_opener(proxy_support))
+        proxy_support = ProxyHandler({
+            'http': 'http://127.0.0.1:%d' % server_port,
+        })
+        install_opener(build_opener(proxy_support))
 
         for host in hosts:
-            f = urllib2.urlopen(url(host))
+            f = urlopen(url(host))
             try:
                 self.assertEqual(f.getcode(), 200)
             except AttributeError:
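Note (illustration, not part of the patch): this is the proxy wiring both proxy tests rely on, and the reason tearDown() calls install_opener(None). A minimal sketch with a hypothetical proxy port, assuming six is installed; no request is actually sent:

from six.moves.urllib.request import ProxyHandler, build_opener, install_opener

proxy_support = ProxyHandler({'http': 'http://127.0.0.1:8888'})  # hypothetical port
install_opener(build_opener(proxy_support))  # http:// requests now go via the proxy
install_opener(None)  # what tearDown() does: restore the default opener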
@@ -239,18 +252,19 @@ class ProxyTest(unittest.TestCase):
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
-        proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
-                                              server_port})
-        urllib2.install_opener(urllib2.build_opener(proxy_support))
+        proxy_support = ProxyHandler({
+            'http': 'http://127.0.0.1:%d' % server_port,
+        })
+        install_opener(build_opener(proxy_support))
 
         # set up dirs
         for host in hosts:
             os.mkdir(os.path.join(docroot, host))
-            file(os.path.join(docroot, host, index_filename), 'w') \
+            open(os.path.join(docroot, host, index_filename), 'w') \
                 .write(index_contents(host))
 
         for host in hosts:
-            f = urllib2.urlopen(url(host))
+            f = urlopen(url(host))
             try:
                 self.assertEqual(f.getcode(), 200)
             except AttributeError:
@@ -259,8 +273,8 @@ class ProxyTest(unittest.TestCase):
 
         exc = None
         try:
-            urllib2.urlopen(url(unproxied_host))
-        except urllib2.HTTPError as e:
+            urlopen(url(unproxied_host))
+        except HTTPError as e:
             exc = e
         self.assertNotEqual(exc, None)
         self.assertEqual(exc.code, 404)