Switch all urllib/urlparse usage to six.moves.urllib.
This provides Python 2+3 compatibility for our use of urllib. Also add a test to avoid future regressions.
This commit is contained in:
parent 36131a385b
commit 4dc5761ce2
@@ -3,7 +3,7 @@ import sys
 import time
 import datetime
 import optparse
-import urlparse
+from six.moves import urllib
 import itertools
 import traceback
 import os
@@ -56,12 +56,12 @@ except ImportError:
     parser.error('Install python-gdata')

 def get_calendar_url():
-    parts = urlparse.urlparse(options.calendar)
+    parts = urllib.parse.urlparse(options.calendar)
     pat = os.path.split(parts.path)
     if pat[1] != 'basic':
         parser.error('The --calendar URL should be the XML "Private Address" ' +
                      'from your calendar settings')
-    return urlparse.urlunparse((parts.scheme, parts.netloc, pat[0] + '/full',
+    return urllib.parse.urlunparse((parts.scheme, parts.netloc, pat[0] + '/full',
                                 '', 'futureevents=true&orderby=startdate', ''))

 calendar_url = get_calendar_url()
@@ -37,7 +37,7 @@ import json
 import logging
 import os
 import time
-import urllib2
+from six.moves import urllib

 import sys

@@ -74,8 +74,8 @@ def fetch_from_asana(path):
     headers = {"Authorization": "Basic %s" % auth}

     url = "https://app.asana.com/api/1.0" + path
-    request = urllib2.Request(url, None, headers)
-    result = urllib2.urlopen(request)
+    request = urllib.request.Request(url, None, headers)
+    result = urllib.request.urlopen(request)

     return json.load(result)

@@ -32,7 +32,7 @@ import optparse
 import os
 import sys
 import time
-import urlparse
+from six.moves import urllib

 import feedparser
 import zulip
@@ -169,7 +169,7 @@ client = zulip.Client(email=opts.email, api_key=opts.api_key,
 first_message = True

 for feed_url in feed_urls:
-    feed_file = os.path.join(opts.data_dir, urlparse.urlparse(feed_url).netloc)
+    feed_file = os.path.join(opts.data_dir, urllib.parse.urlparse(feed_url).netloc)

     try:
         with open(feed_file, "r") as f:
@@ -26,16 +26,15 @@ import simplejson
 import requests
 import time
 import traceback
-import urlparse
 import sys
 import os
 import optparse
 import platform
-import urllib
 import random
 from distutils.version import LooseVersion

 from six.moves.configparser import SafeConfigParser
+from six.moves import urllib
 import logging
 import six

@@ -289,7 +288,7 @@ class Client(object):
         kwargs = {kwarg: query_state["request"]}
         res = requests.request(
                 method,
-                urlparse.urljoin(self.base_url, url),
+                urllib.parse.urljoin(self.base_url, url),
                 auth=requests.auth.HTTPBasicAuth(self.email,
                                                  self.api_key),
                 verify=self.tls_verification, timeout=90,
@@ -468,7 +467,7 @@ Client._register('list_subscriptions', method='GET', url='users/me/subscriptions
 Client._register('add_subscriptions', url='users/me/subscriptions', make_request=_mk_subs)
 Client._register('remove_subscriptions', method='PATCH', url='users/me/subscriptions', make_request=_mk_rm_subs)
 Client._register('get_subscribers', method='GET',
-                 computed_url=lambda request: 'streams/%s/members' % (urllib.quote(request['stream'], safe=''),),
+                 computed_url=lambda request: 'streams/%s/members' % (urllib.parse.quote(request['stream'], safe=''),),
                  make_request=_kwargs_to_dict)
 Client._register('render_message', method='GET', url='messages/render')
 Client._register('create_user', method='POST', url='users')
Loading…
Reference in a new issue