# HG changeset patch
# User Mads Kiilerich
# Date 1579728613 -3600
# Node ID 7e9d3865b4f98457069c7e02818bab07a2f835be
# Parent a455353077c83f88b4e33d4f0dc9c42746b22bb7
py3: support new stdlib module names configparser, urllib and http lib

From "2to3 -f imports".

diff -r a455353077c8 -r 7e9d3865b4f9 kallithea/bin/ldap_sync.py
--- a/kallithea/bin/ldap_sync.py	Wed Feb 05 23:04:27 2020 +0100
+++ b/kallithea/bin/ldap_sync.py	Wed Jan 22 22:30:13 2020 +0100
@@ -29,7 +29,7 @@
 import urllib2
 import uuid
 
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 
 import ldap
 
diff -r a455353077c8 -r 7e9d3865b4f9 kallithea/lib/helpers.py
--- a/kallithea/lib/helpers.py	Wed Feb 05 23:04:27 2020 +0100
+++ b/kallithea/lib/helpers.py	Wed Jan 22 22:30:13 2020 +0100
@@ -23,7 +23,7 @@
 import random
 import re
 import textwrap
-import urlparse
+import urllib.parse
 
 from beaker.cache import cache_region
 from pygments import highlight as code_highlight
@@ -932,7 +932,7 @@
     if email_address == _def:
         return default
 
-    parsed_url = urlparse.urlparse(url.current(qualified=True))
+    parsed_url = urllib.parse.urlparse(url.current(qualified=True))
     url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
         .replace('{email}', email_address) \
         .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
diff -r a455353077c8 -r 7e9d3865b4f9 kallithea/tests/functional/test_login.py
--- a/kallithea/tests/functional/test_login.py	Wed Feb 05 23:04:27 2020 +0100
+++ b/kallithea/tests/functional/test_login.py	Wed Jan 22 22:30:13 2020 +0100
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import re
 import time
-import urlparse
+import urllib.parse
 
 import mock
 from tg.util.webtest import test_context
@@ -172,8 +172,8 @@
                                              repo_name=base.HG_REPO,
                                              **args))
             assert response.status == '302 Found'
-            came_from = urlparse.parse_qs(urlparse.urlparse(response.location).query)['came_from'][0]
-            came_from_qs = urlparse.parse_qsl(urlparse.urlparse(came_from).query)
+            came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.location).query)['came_from'][0]
+            came_from_qs = urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query)
             assert sorted(came_from_qs) == sorted((k, v.encode('utf-8')) for k, v in args.items())
 
     @base.parametrize('args,args_encoded', [
@@ -184,7 +184,7 @@
     def test_login_form_preserves_get_args(self, args, args_encoded):
         response = self.app.get(base.url(controller='login', action='index',
                                          came_from=base.url('/_admin/users', **args)))
-        came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0]
+        came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0]
         for encoded in args_encoded:
             assert encoded in came_from
 
@@ -216,7 +216,7 @@
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.mustcontain('Invalid username or password')
 
-        came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0]
+        came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0]
         for encoded in args_encoded:
             assert encoded in came_from
 
@@ -429,7 +429,7 @@
         (confirmation_url,) = (line for line in body.splitlines() if line.startswith('http://'))
         assert ' href="%s"' % confirmation_url.replace('&', '&amp;').replace('@', '%40') in html_body
 
-        d = urlparse.parse_qs(urlparse.urlparse(confirmation_url).query)
+        d = urllib.parse.parse_qs(urllib.parse.urlparse(confirmation_url).query)
         assert d['token'] == [token]
         assert d['timestamp'] == [str(timestamp)]
         assert d['email'] == [email]
diff -r a455353077c8 -r 7e9d3865b4f9 kallithea/tests/scripts/manual_test_crawler.py
--- a/kallithea/tests/scripts/manual_test_crawler.py	Wed Feb 05 23:04:27 2020 +0100
+++ b/kallithea/tests/scripts/manual_test_crawler.py	Wed Jan 22 22:30:13 2020 +0100
@@ -32,7 +32,7 @@
 
 from __future__ import print_function
 
-import cookielib
+import http.cookiejar
 import os
 import sys
 import tempfile
@@ -72,7 +72,7 @@
 ]
 
 
-cj = cookielib.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
+cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
 o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
 o.addheaders = [
     ('User-agent', 'kallithea-crawler'),
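
For reference (not part of the changeset above): a minimal sketch of how the Python 2 modules touched by this patch map to their Python 3 stdlib names. The sample URL and cookie file path below are illustrative assumptions, not code from Kallithea.

    # Python 3 names introduced by the patch (old Python 2 names in the trailing comments)
    from configparser import ConfigParser   # was: from ConfigParser import ConfigParser
    import urllib.parse                      # was: import urlparse
    import http.cookiejar                    # was: import cookielib

    # The renamed modules expose the same functions the patch relies on,
    # so call sites only need the new module path.
    query = urllib.parse.urlparse('http://example.com/login?came_from=%2F_admin%2Fusers').query
    print(urllib.parse.parse_qs(query))      # {'came_from': ['/_admin/users']}

    config = ConfigParser()                                                # same class name as in Python 2
    cookie_jar = http.cookiejar.FileCookieJar('/tmp/example_cookies.txt')  # example path only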