changeset 8368:e5ccabbc3cd7 stable

release: merge default to stable for 0.6.0
author Thomas De Schampheleire <thomas.de_schampheleire@nokia.com>
date Sat, 02 May 2020 21:20:43 +0200
parents b1b1f69b1f28 (current diff) 90dd59c2a76a (diff)
children f5e0fa641336
files
diffstat 327 files changed, 8424 insertions(+), 7327 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/.eslintrc.js	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,21 @@
+module.exports = {
+    "env": {
+        "browser": true,
+        "es6": true,
+        "jquery": true
+    },
+    "extends": "eslint:recommended",
+    "globals": {
+        "Atomics": "readonly",
+        "SharedArrayBuffer": "readonly"
+    },
+    "parserOptions": {
+        "ecmaVersion": 2018,
+        "sourceType": "module"
+    },
+    "plugins": [
+        "html"
+    ],
+    "rules": {
+    }
+};
--- a/Jenkinsfile	Thu Apr 09 18:03:56 2020 +0200
+++ b/Jenkinsfile	Sat May 02 21:20:43 2020 +0200
@@ -9,10 +9,10 @@
                               daysToKeepStr: '',
                               numToKeepStr: '']]]);
     if (isUnix()) {
-        createvirtualenv = 'rm -r $JENKINS_HOME/venv/$JOB_NAME || true && virtualenv $JENKINS_HOME/venv/$JOB_NAME'
+        createvirtualenv = 'rm -r $JENKINS_HOME/venv/$JOB_NAME || true && python3 -m venv $JENKINS_HOME/venv/$JOB_NAME'
         activatevirtualenv = '. $JENKINS_HOME/venv/$JOB_NAME/bin/activate'
     } else {
-        createvirtualenv = 'rmdir /s /q %JENKINS_HOME%\\venv\\%JOB_NAME% || true && virtualenv %JENKINS_HOME%\\venv\\%JOB_NAME%'
+        createvirtualenv = 'rmdir /s /q %JENKINS_HOME%\\venv\\%JOB_NAME% || true && python3 -m venv %JENKINS_HOME%\\venv\\%JOB_NAME%'
         activatevirtualenv = 'call %JENKINS_HOME%\\venv\\%JOB_NAME%\\Scripts\\activate.bat'
     }
 
--- a/README.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/README.rst	Sat May 02 21:20:43 2020 +0200
@@ -24,8 +24,8 @@
 Installation
 ------------
 
-Kallithea requires Python_ 2.7 and it is recommended to install it in a
-virtualenv_. Official releases of Kallithea can be installed with::
+Kallithea requires Python_ 3 and it is recommended to install it in a
+virtualenv. Official releases of Kallithea can be installed with::
 
     pip install kallithea
 
@@ -173,7 +173,6 @@
 of Kallithea.
 
 
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _Python: http://www.python.org/
 .. _Sphinx: http://sphinx.pocoo.org/
 .. _Mercurial: http://mercurial.selenic.com/
--- a/conftest.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/conftest.py	Sat May 02 21:20:43 2020 +0200
@@ -2,10 +2,19 @@
 
 import mock
 import pytest
+import tg
 
 
 here = os.path.dirname(__file__)
 
+# HACK:
+def pytest_configure():
+    # Register global dummy tg.context to avoid "TypeError: No object (name: context) has been registered for this thread"
+    tg.request_local.context._push_object(tg.util.bunch.Bunch())
+    # could be removed again after use with
+    # tg.request_local.context._pop_object ... but we keep it around forever as
+    # a reasonable sentinel
+
 def pytest_ignore_collect(path):
     # ignore all files outside the 'kallithea' directory
     if not str(path).startswith(os.path.join(here, 'kallithea')):
@@ -36,3 +45,10 @@
     m = __import__(request.module.__name__, globals(), locals(), [None], 0)
     with mock.patch.object(m, '_', lambda s: s):
         yield
+
+if getattr(pytest, 'register_assert_rewrite', None):
+    # make sure that all asserts under kallithea/tests benefit from advanced
+    # assert reporting with pytest-3.0.0+, including api/api_base.py,
+    # models/common.py etc.
+    # See also: https://docs.pytest.org/en/latest/assert.html#advanced-assertion-introspection
+    pytest.register_assert_rewrite('kallithea.tests')
--- a/dev_requirements.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/dev_requirements.txt	Sat May 02 21:20:43 2020 +0200
@@ -1,8 +1,9 @@
-pytest >= 4.6.6, < 4.7
+pytest >= 4.6.6, < 5.4
 pytest-sugar >= 0.9.2, < 0.10
 pytest-benchmark >= 3.2.2, < 3.3
 pytest-localserver >= 0.5.0, < 0.6
-mock >= 3.0.0, < 3.1
-Sphinx >= 1.8.0, < 1.9
-WebTest >= 2.0.3, < 2.1
+mock >= 3.0.0, < 4.1
+Sphinx >= 1.8.0, < 2.4
+WebTest >= 2.0.6, < 2.1
 isort == 4.3.21
+pyflakes == 2.1.1
--- a/development.ini	Thu Apr 09 18:03:56 2020 +0200
+++ b/development.ini	Sat May 02 21:20:43 2020 +0200
@@ -1,10 +1,10 @@
-################################################################################
-################################################################################
-# Kallithea - config file generated with kallithea-config                      #
-#                                                                              #
-# The %(here)s variable will be replaced with the parent directory of this file#
-################################################################################
-################################################################################
+###################################################################################
+###################################################################################
+## Kallithea config file generated with kallithea-config                         ##
+##                                                                               ##
+## The %(here)s variable will be replaced with the parent directory of this file ##
+###################################################################################
+###################################################################################
 
 [DEFAULT]
 
@@ -126,7 +126,7 @@
 ## used, which is correct in many cases but for example not when using uwsgi.
 ## If you change this setting, you should reinstall the Git hooks via
 ## Admin > Settings > Remap and Rescan.
-# git_hook_interpreter = /srv/kallithea/venv/bin/python2
+#git_hook_interpreter = /srv/kallithea/venv/bin/python3
 
 ## path to git executable
 git_path = git
@@ -198,7 +198,7 @@
 ## issue_pat, issue_server_link and issue_sub can have suffixes to specify
 ## multiple patterns, to other issues server, wiki or others
 ## below an example how to create a wiki pattern
-# wiki-some-id -> https://wiki.example.com/some-id
+## wiki-some-id -> https://wiki.example.com/some-id
 
 #issue_pat_wiki = wiki-(\S+)
 #issue_server_link_wiki = https://wiki.example.com/\1
@@ -216,12 +216,12 @@
 allow_custom_hooks_settings = True
 
 ## extra extensions for indexing, space separated and without the leading '.'.
-# index.extensions =
+#index.extensions =
 #    gemfile
 #    lock
 
 ## extra filenames for indexing, space separated
-# index.filenames =
+#index.filenames =
 #    .dockerignore
 #    .editorconfig
 #    INSTALL
@@ -250,25 +250,23 @@
 ###        CELERY CONFIG        ####
 ####################################
 
+## Note: Celery doesn't support Windows.
 use_celery = false
 
-## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq:
-broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost
+## Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.
 
-celery.imports = kallithea.lib.celerylib.tasks
-celery.accept.content = pickle
-celery.result.backend = amqp
-celery.result.dburi = amqp://
-celery.result.serializer = json
+## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':
+celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
 
-#celery.send.task.error.emails = true
+celery.result.backend = db+sqlite:///celery-results.db
+
 #celery.amqp.task.result.expires = 18000
 
-celeryd.concurrency = 2
-celeryd.max.tasks.per.child = 1
+celery.worker_concurrency = 2
+celery.worker_max_tasks_per_child = 1
 
 ## If true, tasks will never be sent to the queue, but executed locally instead.
-celery.always.eager = false
+celery.task_always_eager = false
 
 ####################################
 ###         BEAKER CACHE        ####
@@ -277,19 +275,15 @@
 beaker.cache.data_dir = %(here)s/data/cache/data
 beaker.cache.lock_dir = %(here)s/data/cache/lock
 
-beaker.cache.regions = short_term,long_term,sql_cache_short
-
-beaker.cache.short_term.type = memory
-beaker.cache.short_term.expire = 60
-beaker.cache.short_term.key_length = 256
+beaker.cache.regions = long_term,long_term_file
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 36000
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
+beaker.cache.long_term_file.type = file
+beaker.cache.long_term_file.expire = 604800
+beaker.cache.long_term_file.key_length = 256
 
 ####################################
 ###       BEAKER SESSION        ####
@@ -324,12 +318,25 @@
 #session.sa.url = postgresql://postgres:qwe@localhost/kallithea
 #session.table_name = db_session
 
-############################
-## ERROR HANDLING SYSTEMS ##
-############################
+####################################
+###       ERROR HANDLING        ####
+####################################
+
+## Show a nice error page for application HTTP errors and exceptions (default true)
+#errorpage.enabled = true
 
-# Propagate email settings to ErrorReporter of TurboGears2
-# You do not normally need to change these lines
+## Enable Backlash client-side interactive debugger (default false)
+## WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*
+## This debug mode will allow all visitors to execute malicious code.
+#debug = false
+debug = true
+
+## Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)
+#trace_errors.enable = true
+## Errors will be reported by mail if trace_errors.error_email is set.
+
+## Propagate email settings to ErrorReporter of TurboGears2
+## You do not normally need to change these lines
 get trace_errors.smtp_server = smtp_server
 get trace_errors.smtp_port = smtp_port
 get trace_errors.from_address = error_email_from
@@ -338,13 +345,6 @@
 get trace_errors.smtp_password = smtp_password
 get trace_errors.smtp_use_tls = smtp_use_tls
 
-################################################################################
-## WARNING: *DEBUG MODE MUST BE OFF IN A PRODUCTION ENVIRONMENT*              ##
-## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
-## execute malicious code after an exception is raised.                       ##
-################################################################################
-#debug = false
-debug = true
 
 ##################################
 ###       LOGVIEW CONFIG       ###
@@ -358,10 +358,10 @@
 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG    ###
 #########################################################
 
-# SQLITE [default]
+## SQLITE [default]
 sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
 
-# see sqlalchemy docs for others
+## see sqlalchemy docs for other backends
 
 sqlalchemy.pool_recycle = 3600
 
@@ -392,9 +392,8 @@
 [logger_root]
 level = NOTSET
 #handlers = console
+## For coloring based on log level:
 handlers = console_color
-# For coloring based on log level:
-# handlers = console_color
 
 [logger_routes]
 #level = WARN
@@ -437,10 +436,10 @@
 level = WARN
 handlers =
 qualname = sqlalchemy.engine
-# For coloring based on log level and pretty printing of SQL:
-# level = INFO
-# handlers = console_color_sql
-# propagate = 0
+## For coloring based on log level and pretty printing of SQL:
+#level = INFO
+#handlers = console_color_sql
+#propagate = 0
 
 [logger_whoosh_indexer]
 #level = WARN
@@ -468,13 +467,13 @@
 formatter = generic
 
 [handler_console_color]
-# ANSI color coding based on log level
+## ANSI color coding based on log level
 class = StreamHandler
 args = (sys.stderr,)
 formatter = color_formatter
 
 [handler_console_color_sql]
-# ANSI color coding and pretty printing of SQL statements
+## ANSI color coding and pretty printing of SQL statements
 class = StreamHandler
 args = (sys.stderr,)
 formatter = color_formatter_sql
@@ -505,16 +504,16 @@
 ## SSH LOGGING ##
 #################
 
-# The default loggers use 'handler_console' that uses StreamHandler with
-# destination 'sys.stderr'. In the context of the SSH server process, these log
-# messages would be sent to the client, which is normally not what you want.
-# By default, when running ssh-serve, just use NullHandler and disable logging
-# completely. For other logging options, see:
-# https://docs.python.org/2/library/logging.handlers.html
+## The default loggers use 'handler_console' that uses StreamHandler with
+## destination 'sys.stderr'. In the context of the SSH server process, these log
+## messages would be sent to the client, which is normally not what you want.
+## By default, when running ssh-serve, just use NullHandler and disable logging
+## completely. For other logging options, see:
+## https://docs.python.org/3/library/logging.handlers.html
 
 [ssh_serve:logger_root]
 level = CRITICAL
 handlers = null
 
-# Note: If logging is configured with other handlers, they might need similar
-# muting for ssh-serve too.
+## Note: If logging is configured with other handlers, they might need similar
+## muting for ssh-serve too.
--- a/docs/administrator_guide/auth.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/administrator_guide/auth.rst	Sat May 02 21:20:43 2020 +0200
@@ -135,10 +135,10 @@
 .. _Custom CA Certificates:
 
 Custom CA Certificates : optional
-    Directory used by OpenSSL to find CAs for validating the LDAP server certificate.
-    Python 2.7.10 and later default to using the system certificate store, and
-    this should thus not be necessary when using certificates signed by a CA
-    trusted by the system.
+    Directory used by OpenSSL to find CAs for validating the LDAP server
+    certificate. It defaults to using the system certificate store, and it
+    should thus not be necessary to specify *Custom CA Certificates* when using
+    certificates signed by a CA trusted by the system.
     It can be set to something like `/etc/openldap/cacerts` on older systems or
     if using self-signed certificates.
 
--- a/docs/api/models.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/api/models.rst	Sat May 02 21:20:43 2020 +0200
@@ -13,9 +13,6 @@
 .. automodule:: kallithea.model.permission
    :members:
 
-.. automodule:: kallithea.model.repo_permission
-   :members:
-
 .. automodule:: kallithea.model.repo
    :members:
 
--- a/docs/conf.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/conf.py	Sat May 02 21:20:43 2020 +0200
@@ -46,8 +46,8 @@
 master_doc = 'index'
 
 # General information about the project.
-project = u'Kallithea'
-copyright = u'2010-2020 by various authors, licensed as GPLv3.'
+project = 'Kallithea'
+copyright = '2010-2020 by various authors, licensed as GPLv3.'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -187,8 +187,8 @@
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'Kallithea.tex', u'Kallithea Documentation',
-   u'Kallithea Developers', 'manual'),
+  ('index', 'Kallithea.tex', 'Kallithea Documentation',
+   'Kallithea Developers', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -220,8 +220,8 @@
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('index', 'kallithea', u'Kallithea Documentation',
-     [u'Kallithea Developers'], 1)
+    ('index', 'kallithea', 'Kallithea Documentation',
+     ['Kallithea Developers'], 1)
 ]
 
 
--- a/docs/contributing.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/contributing.rst	Sat May 02 21:20:43 2020 +0200
@@ -32,7 +32,7 @@
 
         hg clone https://kallithea-scm.org/repos/kallithea
         cd kallithea
-        virtualenv ../kallithea-venv
+        python3 -m venv ../kallithea-venv
         source ../kallithea-venv/bin/activate
         pip install --upgrade pip setuptools
         pip install --upgrade -e . -r dev_requirements.txt python-ldap python-pam
@@ -92,8 +92,7 @@
 and the test suite creates repositories in the temporary directory. Linux
 systems with /tmp mounted noexec will thus fail.
 
-You can also use ``tox`` to run the tests with all supported Python versions
-(currently only Python 2.7).
+You can also use ``tox`` to run the tests with all supported Python versions.
 
 When running tests, Kallithea generates a `test.ini` based on template values
 in `kallithea/tests/conftest.py` and populates the SQLite database specified
@@ -199,8 +198,7 @@
 consistency with existing code. Run ``scripts/run-all-cleanup`` before
 committing to ensure some basic code formatting consistency.
 
-We currently only support Python 2.7.x and nothing else. For now we don't care
-about Python 3 compatibility.
+We support Python 3.6 and later.
 
 We try to support the most common modern web browsers. IE9 is still supported
 to the extent it is feasible, IE8 is not.
@@ -238,8 +236,8 @@
 as in an independent database transaction). ``Session`` is the session manager
 and factory. ``Session()`` will create a new session on-demand or return the
 current session for the active thread. Many database operations are methods on
-such session instances - only ``Session.remove()`` should be called directly on
-the manager.
+such session instances. The session will generally be removed by
+TurboGears automatically.
 
 Database model objects
 (almost) always belong to a particular SQLAlchemy session, which means
@@ -268,6 +266,20 @@
 a freshly created model object (before flushing, the ID attribute will
 be ``None``).
 
+Debugging
+^^^^^^^^^
+
+A good way to trace what Kallithea is doing is to keep an eye on the output of
+stdout/stderr from the server process. Perhaps change ``my.ini`` to log at
+``DEBUG`` or ``INFO`` level, especially ``[logger_kallithea]``, but perhaps
+also other loggers. It is often easier to add additional ``log`` or ``print``
+statements than to use a Python debugger.
+
+Sometimes it is simpler to disable ``errorpage.enabled`` and perhaps also
+``trace_errors.enable`` to expose raw errors instead of adding extra
+processing. Enabling ``debug`` can be helpful for showing and exploring
+tracebacks in the browser, but is also insecure and will add extra processing.
+
 TurboGears2 DebugBar
 ^^^^^^^^^^^^^^^^^^^^
 
--- a/docs/index.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/index.rst	Sat May 02 21:20:43 2020 +0200
@@ -78,7 +78,6 @@
    dev/dbmigrations
 
 
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _python: http://www.python.org/
 .. _django: http://www.djangoproject.com/
 .. _mercurial: https://www.mercurial-scm.org/
--- a/docs/installation.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/installation.rst	Sat May 02 21:20:43 2020 +0200
@@ -35,12 +35,12 @@
 For Debian and Ubuntu, the following command will ensure that a reasonable
 set of dependencies is installed::
 
-    sudo apt-get install build-essential git python-pip python-virtualenv libffi-dev python-dev
+    sudo apt-get install build-essential git libffi-dev python3-dev
 
 For Fedora and RHEL-derivatives, the following command will ensure that a
 reasonable set of dependencies is installed::
 
-    sudo yum install gcc git python-pip python-virtualenv libffi-devel python-devel
+    sudo yum install gcc git libffi-devel python3-devel
 
 .. _installation-source:
 
@@ -48,16 +48,16 @@
 Installation from repository source
 -----------------------------------
 
-To install Kallithea in a virtualenv_ using the stable branch of the development
+To install Kallithea in a virtualenv using the stable branch of the development
 repository, follow the instructions below::
 
         hg clone https://kallithea-scm.org/repos/kallithea -u stable
         cd kallithea
-        virtualenv ../kallithea-venv
+        python3 -m venv ../kallithea-venv
         . ../kallithea-venv/bin/activate
         pip install --upgrade pip setuptools
         pip install --upgrade -e .
-        python2 setup.py compile_catalog   # for translation of the UI
+        python3 setup.py compile_catalog   # for translation of the UI
 
 You can now proceed to :ref:`setup`.
 
@@ -67,18 +67,18 @@
 Installing a released version in a virtualenv
 ---------------------------------------------
 
-It is highly recommended to use a separate virtualenv_ for installing Kallithea.
+It is highly recommended to use a separate virtualenv for installing Kallithea.
 This way, all libraries required by Kallithea will be installed separately from your
 main Python installation and other applications and things will be less
 problematic when upgrading the system or Kallithea.
-An additional benefit of virtualenv_ is that it doesn't require root privileges.
+An additional benefit of virtualenv is that it doesn't require root privileges.
 
-- Assuming you have installed virtualenv_, create a new virtual environment
-  for example, in `/srv/kallithea/venv`, using the virtualenv command::
+- Assuming you have installed virtualenv, create a new virtual environment
+  for example, in `/srv/kallithea/venv`, using the venv command::
 
-    virtualenv /srv/kallithea/venv
+    python3 -m venv /srv/kallithea/venv
 
-- Activate the virtualenv_ in your current shell session and make sure the
+- Activate the virtualenv in your current shell session and make sure the
   basic requirements are up-to-date by running::
 
     . /srv/kallithea/venv/bin/activate
@@ -133,6 +133,3 @@
     pip install --user kallithea
 
 You can now proceed to :ref:`setup`.
-
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
--- a/docs/installation_iis.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/installation_iis.rst	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,7 @@
 .. _installation_iis:
 
+.. warning:: This section is outdated and needs updating for Python 3.
+
 =====================================================================
 Installing Kallithea on Microsoft Internet Information Services (IIS)
 =====================================================================
@@ -66,7 +68,7 @@
 has been generated, it is necessary to run the following command due to the way
 that ISAPI-WSGI is made::
 
-    python2 dispatch.py install
+    python3 dispatch.py install
 
 This accomplishes two things: generating an ISAPI compliant DLL file,
 ``_dispatch.dll``, and installing a script map handler into IIS for the
@@ -119,7 +121,7 @@
 In order to dump output from WSGI using ``win32traceutil`` it is sufficient to
 type the following in a console window::
 
-    python2 -m win32traceutil
+    python3 -m win32traceutil
 
 and any exceptions occurring in the WSGI layer and below (i.e. in the Kallithea
 application itself) that are uncaught, will be printed here complete with stack
--- a/docs/installation_win.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/installation_win.rst	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,7 @@
 .. _installation_win:
 
+.. warning:: This section is outdated and needs updating for Python 3.
+
 ====================================================
 Installation on Windows (7/Server 2008 R2 and newer)
 ====================================================
@@ -17,18 +19,16 @@
 Step 1 -- Install Python
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-Install Python 2.7.x. Latest version is recommended. If you need another version, they can run side by side.
+Install Python 3. Latest version is recommended. If you need another version, they can run side by side.
 
-.. warning:: Python 3.x is not supported.
-
-- Download Python 2.7.x from http://www.python.org/download/
+- Download Python 3 from http://www.python.org/download/
 - Choose and click on the version
 - Click on "Windows X86-64 Installer" for x64 or "Windows x86 MSI installer" for Win32.
 - Disable UAC or run the installer with admin privileges. If you chose to disable UAC, do not forget to reboot afterwards.
 
-While writing this guide, the latest version was v2.7.9.
+While writing this guide, the latest version was v3.8.1.
 Remember the specific major and minor versions installed, because they will
-be needed in the next step. In this case, it is "2.7".
+be needed in the next step. In this case, it is "3.8".
 
 Step 2 -- Python BIN
 ^^^^^^^^^^^^^^^^^^^^
@@ -42,7 +42,7 @@
   SETX PATH "%PATH%;[your-python-path]" /M
 
 Please substitute [your-python-path] with your Python installation
-path. Typically this is ``C:\\Python27``.
+path. Typically this is ``C:\\Python38``.
 
 Step 3 -- Install pywin32 extensions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -52,38 +52,14 @@
 
 - Click on "pywin32" folder
 - Click on the first folder (in this case, Build 219, maybe newer when you try)
-- Choose the file ending with ".amd64-py2.x.exe" (".win32-py2.x.exe"
+- Choose the file ending with ".amd64-py3.x.exe" (".win32-py3.x.exe"
   for Win32) where x is the minor version of Python you installed.
   When writing this guide, the file was:
-  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win-amd64-py2.7.exe/download
+  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win-amd64-py3.8.exe/download
   (x64)
-  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win32-py2.7.exe/download
+  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win32-py3.8.exe/download
   (Win32)
 
-Step 4 -- Install pip
-^^^^^^^^^^^^^^^^^^^^^
-
-pip is a package management system for Python. You will need it to install Kallithea and its dependencies.
-
-If you installed Python 2.7.9+, you already have it (as long as you ran the installer with admin privileges or disabled UAC).
-
-If it was not installed or if you are using Python < 2.7.9:
-
-- Go to https://bootstrap.pypa.io
-- Right-click on get-pip.py and choose Saves as...
-- Run "python2 get-pip.py" in the folder where you downloaded get-pip.py (may require admin access).
-
-.. note::
-
-   See http://stackoverflow.com/questions/4750806/how-to-install-pip-on-windows
-   for details and alternative methods.
-
-Note that pip.exe will be placed inside your Python installation's
-Scripts folder, which is likely not on your path. To correct this,
-open a CMD and type::
-
-  SETX PATH "%PATH%;[your-python-path]\Scripts" /M
-
 Step 5 -- Kallithea folder structure
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -108,24 +84,18 @@
    A python virtual environment will allow for isolation between the Python packages of your system and those used for Kallithea.
    It is strongly recommended to use it to ensure that Kallithea does not change a dependency that other software uses or vice versa.
 
-In a command prompt type::
-
-  pip install virtualenv
-
-Virtualenv will now be inside your Python Scripts path (C:\\Python27\\Scripts or similar).
-
 To create a virtual environment, run::
 
-  virtualenv C:\Kallithea\Env
+  python3 -m venv C:\Kallithea\Env
 
 Step 7 -- Install Kallithea
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 In order to install Kallithea, you need to be able to run "pip install kallithea". It will use pip to install the Kallithea Python package and its dependencies.
 Some Python packages use managed code and need to be compiled.
-This can be done on Linux without any special steps. On Windows, you will need to install Microsoft Visual C++ compiler for Python 2.7.
+This can be done on Linux without any special steps. On Windows, you will need to install Microsoft Visual C++ compiler for Python 3.8.
 
-Download and install "Microsoft Visual C++ Compiler for Python 2.7" from http://aka.ms/vcpython27
+Download and install "Microsoft C++ Build Tools" from https://visualstudio.microsoft.com/visual-cpp-build-tools/
 
 .. note::
   You can also install the dependencies using already compiled Windows binaries packages. A good source of compiled Python packages is http://www.lfd.uci.edu/~gohlke/pythonlibs/. However, not all of the necessary packages for Kallithea are on this site and some are hard to find, so we will stick with using the compiler.
--- a/docs/installation_win_old.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/installation_win_old.rst	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,7 @@
 .. _installation_win_old:
 
+.. warning:: This section is outdated and needs updating for Python 3.
+
 ==========================================================
 Installation on Windows (XP/Vista/Server 2003/Server 2008)
 ==========================================================
@@ -60,14 +62,11 @@
 Step 2 -- Install Python
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-Install Python 2.7.x x86 version (32-bit). DO NOT USE A 3.x version.
-Download Python 2.7.x from:
+Install Python 3.8.x from:
 http://www.python.org/download/
 
-Choose "Windows Installer" (32-bit version) not "Windows X86-64
-Installer". While writing this guide, the latest version was v2.7.3.
 Remember the specific major and minor version installed, because it will
-be needed in the next step. In this case, it is "2.7".
+be needed in the next step. In this case, it is "3.8".
 
 .. note::
 
@@ -80,17 +79,17 @@
 http://sourceforge.net/projects/pywin32/files/
 
 - Click on "pywin32" folder
-- Click on the first folder (in this case, Build 217, maybe newer when you try)
-- Choose the file ending with ".win32-py2.x.exe" -> x being the minor
+- Click on the first folder (in this case, Build 218, maybe newer when you try)
+- Choose the file ending with ".win32-py3.x.exe" -> x being the minor
   version of Python you installed (in this case, 7)
   When writing this guide, the file was:
-  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20217/pywin32-217.win32-py2.7.exe/download
+  http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win32-py3.8.exe/download
 
   .. note::
 
      64-bit: Download and install the 64-bit version.
      At the time of writing you can find this at:
-     http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win-amd64-py2.7.exe/download
+     http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win-amd64-py3.8.exe/download
 
 Step 4 -- Python BIN
 ^^^^^^^^^^^^^^^^^^^^
@@ -117,7 +116,7 @@
     SETX PATH "%PATH%;[your-python-path]" /M
 
   Please substitute [your-python-path] with your Python installation path.
-  Typically: C:\\Python27
+  Typically: C:\\Python38
 
 Step 5 -- Kallithea folder structure
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -139,22 +138,10 @@
 Step 6 -- Install virtualenv
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Install Virtual Env for Python
-
-Navigate to: http://www.virtualenv.org/en/latest/index.html#installation
-Right click on "virtualenv.py" file and choose "Save link as...".
-Download to C:\\Kallithea (or whatever you want)
-(the file is located at
-https://raw.github.com/pypa/virtualenv/master/virtualenv.py)
+Create a virtual Python environment in C:\\Kallithea\\Env (or similar). To
+do so, open a CMD (Python Path should be included in Step 3), and write::
 
-Create a virtual Python environment in C:\\Kallithea\\Env (or similar). To
-do so, open a CMD (Python Path should be included in Step3), navigate
-where you downloaded "virtualenv.py", and write::
-
-  python2 virtualenv.py C:\Kallithea\Env
-
-(--no-site-packages is now the default behaviour of virtualenv, no need
-to include it)
+  python3 -m venv C:\Kallithea\Env
 
 Step 7 -- Install Kallithea
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^
--- a/docs/overview.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/overview.rst	Sat May 02 21:20:43 2020 +0200
@@ -12,7 +12,7 @@
 ------------------
 
 **Kallithea** is written entirely in Python_ and requires Python version
-2.7 or higher. Python 3.x is currently not supported.
+3.6 or higher.
 
 Given a Python installation, there are different ways of providing the
 environment for running Python applications. Each of them pretty much
@@ -30,7 +30,7 @@
 - Packages could also be installed in ``~/.local`` ... but that is probably
   only a good idea if using a dedicated user per application or instance.
 
-- Finally, it can be installed in a virtualenv_. That is a very lightweight
+- Finally, it can be installed in a virtualenv. That is a very lightweight
   "container" where each Kallithea instance can get its own dedicated and
   self-contained virtual environment.
 
@@ -98,7 +98,7 @@
   installed with all dependencies using ``pip install kallithea``.
 
   With this method, Kallithea is installed in the Python environment as any
-  other package, usually as a ``.../site-packages/Kallithea-X-py2.7.egg/``
+  other package, usually as a ``.../site-packages/Kallithea-X-py3.8.egg/``
   directory with Python files and everything else that is needed.
 
   (``pip install kallithea`` from a source tree will do pretty much the same
@@ -165,7 +165,6 @@
 .. _Python: http://www.python.org/
 .. _Gunicorn: http://gunicorn.org/
 .. _Waitress: http://waitress.readthedocs.org/en/latest/
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _Gearbox: http://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
 .. _PyPI: https://pypi.python.org/pypi
 .. _Apache httpd: http://httpd.apache.org/
--- a/docs/setup.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/setup.rst	Sat May 02 21:20:43 2020 +0200
@@ -332,11 +332,11 @@
 
   use_celery = true
 
-and add or change the ``celery.*`` and ``broker.*`` configuration variables.
+and add or change the ``celery.*`` configuration variables.
 
-Remember that the ini files use the format with '.' and not with '_' like
-Celery. So for example setting `BROKER_HOST` in Celery means setting
-`broker.host` in the configuration file.
+Configuration settings are prefixed with 'celery.', so for example setting
+`broker_url` in Celery means setting `celery.broker_url` in the configuration
+file.
 
 To start the Celery process, run::
 
@@ -557,7 +557,7 @@
       os.chdir('/srv/kallithea/')
 
       import site
-      site.addsitedir("/srv/kallithea/venv/lib/python2.7/site-packages")
+      site.addsitedir("/srv/kallithea/venv/lib/python3.7/site-packages")
 
       ini = '/srv/kallithea/my.ini'
       from logging.config import fileConfig
@@ -624,7 +624,6 @@
 .. __: https://kallithea-scm.org/repos/kallithea/files/tip/init.d/ .
 
 
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _python: http://www.python.org/
 .. _Python regular expression documentation: https://docs.python.org/2/library/re.html
 .. _Mercurial: https://www.mercurial-scm.org/
--- a/docs/upgrade.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/upgrade.rst	Sat May 02 21:20:43 2020 +0200
@@ -241,6 +241,3 @@
 .. note::
     Kallithea does not use hooks on Mercurial repositories. This step is thus
     not necessary if you only have Mercurial repositories.
-
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
--- a/docs/usage/troubleshooting.rst	Thu Apr 09 18:03:56 2020 +0200
+++ b/docs/usage/troubleshooting.rst	Sat May 02 21:20:43 2020 +0200
@@ -8,7 +8,7 @@
 :A: Make sure either to set the ``static_files = true`` in the .ini file or
    double check the root path for your http setup. It should point to
    for example:
-   ``/home/my-virtual-python/lib/python2.7/site-packages/kallithea/public``
+   ``/home/my-virtual-python/lib/python3.7/site-packages/kallithea/public``
 
 |
 
@@ -67,7 +67,6 @@
     you have installed the latest Windows patches (especially KB2789397).
 
 
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _python: http://www.python.org/
 .. _mercurial: https://www.mercurial-scm.org/
 .. _celery: http://celeryproject.org/
--- a/kallithea/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -31,13 +31,16 @@
 import sys
 
 
-VERSION = (0, 5, 2)
+if sys.version_info < (3, 6):
+    raise Exception('Kallithea requires python 3.6 or later')
+
+VERSION = (0, 5, 99)
 BACKENDS = {
     'hg': 'Mercurial repository',
     'git': 'Git repository',
 }
 
-CELERY_ON = False
+CELERY_APP = None  # set to Celery app instance if using Celery
 CELERY_EAGER = False
 
 CONFIG = {}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/alembic/versions/a0a1bf09c143_db_add_ui_composite_index_and_drop_.py	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,48 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""db: add Ui composite index and drop UniqueConstraint on Ui.ui_key
+
+Revision ID: a0a1bf09c143
+Revises: d7ec25b66e47
+Create Date: 2020-03-12 22:41:14.421837
+
+"""
+
+# The following opaque hexadecimal identifiers ("revisions") are used
+# by Alembic to track this migration script and its relations to others.
+revision = 'a0a1bf09c143'
+down_revision = 'd7ec25b66e47'
+branch_labels = None
+depends_on = None
+
+import sqlalchemy as sa
+from alembic import op
+
+
+def upgrade():
+    meta = sa.MetaData()
+    meta.reflect(bind=op.get_bind())
+
+    with op.batch_alter_table('ui', schema=None) as batch_op:
+        batch_op.create_index('ui_ui_section_ui_key_idx', ['ui_section', 'ui_key'], unique=False)
+        if any(i.name == 'uq_ui_ui_key' for i in meta.tables['ui'].constraints):
+            batch_op.drop_constraint('uq_ui_ui_key', type_='unique')
+        elif any(i.name == 'ui_ui_key_key' for i in meta.tables['ui'].constraints):  # table was created with old naming before 1a080d4e926e
+            batch_op.drop_constraint('ui_ui_key_key', type_='unique')
+
+
+def downgrade():
+    with op.batch_alter_table('ui', schema=None) as batch_op:
+        batch_op.create_unique_constraint('uq_ui_ui_key', ['ui_key'])
+        batch_op.drop_index('ui_ui_section_ui_key_idx')
--- a/kallithea/bin/base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/base.py	Sat May 02 21:20:43 2020 +0200
@@ -29,9 +29,10 @@
 import pprint
 import random
 import sys
-import urllib2
+import urllib.request
 
-from kallithea.lib.compat import json
+from kallithea.lib import ext_json
+from kallithea.lib.utils2 import ascii_bytes
 
 
 CONFIG_NAME = '.config/kallithea'
@@ -67,12 +68,12 @@
         raise Exception('please specify method name !')
     apihost = apihost.rstrip('/')
     id_ = random.randrange(1, 9999)
-    req = urllib2.Request('%s/_admin/api' % apihost,
-                      data=json.dumps(_build_data(id_)),
+    req = urllib.request.Request('%s/_admin/api' % apihost,
+                      data=ascii_bytes(ext_json.dumps(_build_data(id_))),
                       headers={'content-type': 'text/plain'})
-    ret = urllib2.urlopen(req)
+    ret = urllib.request.urlopen(req)
     raw_json = ret.read()
-    json_data = json.loads(raw_json)
+    json_data = ext_json.loads(raw_json)
     id_ret = json_data['id']
     if id_ret == id_:
         return json_data
@@ -107,7 +108,7 @@
     def __getitem__(self, key):
         return self._conf[key]
 
-    def __nonzero__(self):
+    def __bool__(self):
         if self._conf:
             return True
         return False
@@ -128,7 +129,7 @@
         if os.path.exists(self._conf_name):
             update = True
         with open(self._conf_name, 'wb') as f:
-            json.dump(config, f, indent=4)
+            ext_json.dump(config, f, indent=4)
             f.write('\n')
 
         if update:
@@ -146,7 +147,7 @@
         config = {}
         try:
             with open(self._conf_name, 'rb') as conf:
-                config = json.load(conf)
+                config = ext_json.load(conf)
         except IOError as e:
             sys.stderr.write(str(e) + '\n')
 
@@ -159,7 +160,7 @@
         """
         try:
             with open(self._conf_name, 'rb') as conf:
-                return json.load(conf)
+                return ext_json.load(conf)
         except IOError as e:
             #sys.stderr.write(str(e) + '\n')
             pass
--- a/kallithea/bin/kallithea_api.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_api.py	Sat May 02 21:20:43 2020 +0200
@@ -25,12 +25,11 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
 import argparse
+import json
 import sys
 
-from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
+from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call
 
 
 def argparser(argv):
@@ -60,7 +59,7 @@
                  'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
             default=FORMAT_PRETTY
     )
-    args, other = parser.parse_known_args()
+    args, other = parser.parse_known_args(args=argv[1:])
     return parser, args, other
 
 
@@ -101,7 +100,7 @@
         parser.error('Please specify method name')
 
     try:
-        margs = dict(map(lambda s: s.split(':', 1), other))
+        margs = dict(s.split(':', 1) for s in other)
     except ValueError:
         sys.stderr.write('Error parsing arguments \n')
         sys.exit()
--- a/kallithea/bin/kallithea_cli.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli.py	Sat May 02 21:20:43 2020 +0200
@@ -25,3 +25,8 @@
 import kallithea.bin.kallithea_cli_ssh
 # 'cli' is the main entry point for 'kallithea-cli', specified in setup.py as entry_points console_scripts
 from kallithea.bin.kallithea_cli_base import cli
+
+
+# mute pyflakes "imported but unused"
+assert kallithea.bin.kallithea_cli_ssh
+assert cli
--- a/kallithea/bin/kallithea_cli_base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_base.py	Sat May 02 21:20:43 2020 +0200
@@ -12,7 +12,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import cStringIO
+import configparser
 import functools
 import logging.config
 import os
@@ -23,6 +23,7 @@
 import paste.deploy
 
 import kallithea
+import kallithea.config.middleware
 
 
 # kallithea_cli is usually invoked through the 'kallithea-cli' wrapper script
@@ -71,12 +72,12 @@
             def runtime_wrapper(config_file, *args, **kwargs):
                 path_to_ini_file = os.path.realpath(config_file)
                 kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
-                config_bytes = read_config(path_to_ini_file, strip_section_prefix=annotated.__name__)
-                logging.config.fileConfig(cStringIO.StringIO(config_bytes),
+                cp = configparser.ConfigParser(strict=False)
+                cp.read_string(read_config(path_to_ini_file, strip_section_prefix=annotated.__name__))
+                logging.config.fileConfig(cp,
                     {'__file__': path_to_ini_file, 'here': os.path.dirname(path_to_ini_file)})
                 if config_file_initialize_app:
-                    kallithea.config.middleware.make_app_without_logging(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
-                    kallithea.lib.utils.setup_cache_regions(kallithea.CONFIG)
+                    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
                 return annotated(*args, **kwargs)
             return cli_command(runtime_wrapper)
         return annotator
--- a/kallithea/bin/kallithea_cli_celery.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_celery.py	Sat May 02 21:20:43 2020 +0200
@@ -12,6 +12,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+import celery.bin.worker
 import click
 
 import kallithea
@@ -31,10 +32,9 @@
     by this CLI command.
     """
 
-    if not kallithea.CELERY_ON:
+    if not kallithea.CELERY_APP:
         raise Exception('Please set use_celery = true in .ini config '
                         'file before running this command')
 
-    from kallithea.lib import celerypylons
-    cmd = celerypylons.worker.worker(celerypylons.app)
+    cmd = celery.bin.worker.worker(kallithea.CELERY_APP)
     return cmd.run_from_argv(None, command='celery-run -c CONFIG_FILE --', argv=list(celery_args))
--- a/kallithea/bin/kallithea_cli_config.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_config.py	Sat May 02 21:20:43 2020 +0200
@@ -89,6 +89,16 @@
     mako_variable_values.update({
         'uuid': lambda: uuid.uuid4().hex,
     })
+
+    click.echo('Creating config file using:')
+    for key, value in inifile.default_variables.items():
+        if isinstance(value, str):
+            options = inifile.variable_options.get(key)
+            if options:
+                click.echo('  %s=%s  (options: %s)' % (key, mako_variable_values.get(key, value), ', '.join(options)))
+            else:
+                click.echo('  %s=%s' % (key, mako_variable_values.get(key, value)))
+
     try:
         config_file_abs = os.path.abspath(config_file)
         inifile.create(config_file_abs, mako_variable_values, ini_settings)
--- a/kallithea/bin/kallithea_cli_db.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_db.py	Sat May 02 21:20:43 2020 +0200
@@ -67,7 +67,7 @@
     Session().commit()
 
     # initial repository scan
-    kallithea.config.middleware.make_app_without_logging(
+    kallithea.config.middleware.make_app(
             kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
     added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
     if added:
--- a/kallithea/bin/kallithea_cli_iis.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_iis.py	Sat May 02 21:20:43 2020 +0200
@@ -67,6 +67,7 @@
 
     try:
         import isapi_wsgi
+        assert isapi_wsgi
     except ImportError:
         sys.stderr.write('missing requirement: isapi-wsgi not installed\n')
         sys.exit(1)
--- a/kallithea/bin/kallithea_cli_index.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_index.py	Sat May 02 21:20:43 2020 +0200
@@ -36,7 +36,7 @@
 @click.option('--repo-location', help='Base path of repositories to index. Default: all')
 @click.option('--index-only', help='Comma-separated list of repositories to build index on. Default: all')
 @click.option('--update-only', help='Comma-separated list of repositories to re-build index on. Default: all')
-@click.option('-f', '--full', 'full_index', help='Recreate the index from scratch')
+@click.option('-f', '--full/--no-full', 'full_index', help='Recreate the index from scratch')
 def index_create(repo_location, index_only, update_only, full_index):
     """Create or update full text search index"""
 
--- a/kallithea/bin/kallithea_cli_ishell.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_ishell.py	Sat May 02 21:20:43 2020 +0200
@@ -20,12 +20,10 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
 import sys
 
 import kallithea.bin.kallithea_cli_base as cli_base
-from kallithea.model.db import *
+from kallithea.model.db import *  # these names will be directly available in the IPython shell
 
 
 @cli_base.register_command(config_file_initialize_app=True)
--- a/kallithea/bin/kallithea_cli_repo.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_repo.py	Sat May 02 21:20:43 2020 +0200
@@ -26,10 +26,11 @@
 
 import click
 
+import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.utils import REMOVED_REPO_PAT, repo2db_mapper
-from kallithea.lib.utils2 import ask_ok, safe_str, safe_unicode
-from kallithea.model.db import Repository, Ui
+from kallithea.lib.utils2 import ask_ok
+from kallithea.model.db import Repository
 from kallithea.model.meta import Session
 from kallithea.model.scm import ScmModel
 
@@ -74,7 +75,7 @@
     if not repositories:
         repo_list = Repository.query().all()
     else:
-        repo_names = [safe_unicode(n.strip()) for n in repositories]
+        repo_names = [n.strip() for n in repositories]
         repo_list = list(Repository.query()
                         .filter(Repository.repo_name.in_(repo_names)))
 
@@ -110,7 +111,7 @@
             return
         parts = parts.groupdict()
         time_params = {}
-        for (name, param) in parts.iteritems():
+        for name, param in parts.items():
             if param:
                 time_params[name] = int(param)
         return datetime.timedelta(**time_params)
@@ -125,9 +126,9 @@
         date_part = name[4:19]  # 4:19 since we don't parse milliseconds
         return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
 
-    repos_location = Ui.get_repos_location()
+    repos_location = kallithea.CONFIG['base_path']
     to_remove = []
-    for dn_, dirs, f in os.walk(safe_str(repos_location)):
+    for dn_, dirs, f in os.walk(repos_location):
         alldirs = list(dirs)
         del dirs[:]
         if ('.hg' in alldirs or
@@ -175,9 +176,8 @@
         remove = True
     else:
         remove = ask_ok('The following repositories will be removed completely:\n%s\n'
-                'Do you want to proceed? [y/n] '
-                % '\n'.join(['%s deleted on %s' % (safe_str(x[0]), safe_str(x[1]))
-                                     for x in to_remove]))
+            'Do you want to proceed? [y/n] ' %
+            '\n'.join('%s deleted on %s' % (path, date_) for path, date_ in to_remove))
 
     if remove:
         for path, date_ in to_remove:
--- a/kallithea/bin/kallithea_cli_ssh.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_cli_ssh.py	Sat May 02 21:20:43 2020 +0200
@@ -14,7 +14,6 @@
 
 import logging
 import os
-import re
 import shlex
 import sys
 
--- a/kallithea/bin/kallithea_gist.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/kallithea_gist.py	Sat May 02 21:20:43 2020 +0200
@@ -25,15 +25,14 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
 import argparse
 import fileinput
+import json
 import os
 import stat
 import sys
 
-from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
+from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call
 
 
 def argparser(argv):
@@ -69,7 +68,7 @@
                        'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
             default=FORMAT_PRETTY
     )
-    args, other = parser.parse_known_args()
+    args, other = parser.parse_known_args(args=argv[1:])
     return parser, args, other
 
 
--- a/kallithea/bin/ldap_sync.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/bin/ldap_sync.py	Sat May 02 21:20:43 2020 +0200
@@ -25,15 +25,14 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
-import urllib2
+import urllib.request
 import uuid
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 
 import ldap
 
-from kallithea.lib.compat import json
+from kallithea.lib import ext_json
+from kallithea.lib.utils2 import ascii_bytes
 
 
 config = ConfigParser()
@@ -80,12 +79,12 @@
         uid = str(uuid.uuid1())
         data = self.get_api_data(uid, method, args)
 
-        data = json.dumps(data)
+        data = ascii_bytes(ext_json.dumps(data))
         headers = {'content-type': 'text/plain'}
-        req = urllib2.Request(self.url, data, headers)
+        req = urllib.request.Request(self.url, data, headers)
 
-        response = urllib2.urlopen(req)
-        response = json.load(response)
+        response = urllib.request.urlopen(req)
+        response = ext_json.load(response)
 
         if uid != response["id"]:
             raise InvalidResponseIDError("UUID does not match.")
--- a/kallithea/config/app_cfg.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/config/app_cfg.py	Sat May 02 21:20:43 2020 +0200
@@ -28,20 +28,21 @@
 from alembic.migration import MigrationContext
 from alembic.script.base import ScriptDirectory
 from sqlalchemy import create_engine
-from tg import hooks
 from tg.configuration import AppConfig
 from tg.support.converters import asbool
 
 import kallithea.lib.locale
 import kallithea.model.base
-from kallithea.lib.auth import set_available_permissions
+import kallithea.model.meta
+from kallithea.lib import celerypylons
 from kallithea.lib.middleware.https_fixup import HttpsFixup
 from kallithea.lib.middleware.permanent_repo_url import PermanentRepoUrl
 from kallithea.lib.middleware.simplegit import SimpleGit
 from kallithea.lib.middleware.simplehg import SimpleHg
 from kallithea.lib.middleware.wrapper import RequestWrapper
-from kallithea.lib.utils import check_git_version, load_rcextensions, make_ui, set_app_settings, set_indexer_config, set_vcs_config
+from kallithea.lib.utils import check_git_version, load_rcextensions, set_app_settings, set_indexer_config, set_vcs_config
 from kallithea.lib.utils2 import str2bool
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -98,17 +99,12 @@
         # Disable transaction manager -- currently Kallithea takes care of transactions itself
         self['tm.enabled'] = False
 
-        # Set the i18n source language so TG doesn't search beyond 'en' in Accept-Language.
-        # Don't force the default here if configuration force something else.
-        if not self.get('i18n.lang'):
-            self['i18n.lang'] = 'en'
+        # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
+        self['i18n.lang'] = 'en'
 
 
 base_config = KallitheaAppConfig()
 
-# TODO still needed as long as we use pylonslib
-sys.modules['pylons'] = tg
-
 # DebugBar, a debug toolbar for TurboGears2.
 # (https://github.com/TurboGears/tgext.debugbar)
 # To enable it, install 'tgext.debugbar' and 'kajiki', and run Kallithea with
@@ -117,6 +113,7 @@
 try:
     from tgext.debugbar import enable_debugbar
     import kajiki # only to check its existence
+    assert kajiki
 except ImportError:
     pass
 else:
@@ -161,15 +158,14 @@
             sys.exit(1)
 
     # store some globals into kallithea
-    kallithea.CELERY_ON = str2bool(config.get('use_celery'))
-    kallithea.CELERY_EAGER = str2bool(config.get('celery.always.eager'))
+    kallithea.DEFAULT_USER_ID = db.User.get_default_user().user_id
+
+    if str2bool(config.get('use_celery')):
+        kallithea.CELERY_APP = celerypylons.make_app()
     kallithea.CONFIG = config
 
     load_rcextensions(root_path=config['here'])
 
-    set_available_permissions(config)
-    repos_path = make_ui().configitems('paths')[0][1]
-    config['base_path'] = repos_path
     set_app_settings(config)
 
     instance_id = kallithea.CONFIG.get('instance_id', '*')
@@ -188,8 +184,10 @@
 
     check_git_version()
 
+    kallithea.model.meta.Session.remove()
 
-hooks.register('configure_new_app', setup_configuration)
+
+tg.hooks.register('configure_new_app', setup_configuration)
 
 
 def setup_application(app):
@@ -213,4 +211,4 @@
     return app
 
 
-hooks.register('before_config', setup_application)
+tg.hooks.register('before_config', setup_application)
--- a/kallithea/config/conf.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/config/conf.py	Sat May 02 21:20:43 2020 +0200
@@ -35,7 +35,7 @@
 # Whoosh index targets
 
 # Extensions we want to index content of using whoosh
-INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()
+INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP)
 
 # Filenames we want to index content of using whoosh
 INDEX_FILENAMES = pygmentsutils.get_index_filenames()
--- a/kallithea/config/middleware.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/config/middleware.py	Sat May 02 21:20:43 2020 +0200
@@ -24,11 +24,6 @@
 make_base_app = base_config.setup_tg_wsgi_app(load_environment)
 
 
-def make_app_without_logging(global_conf, full_stack=True, **app_conf):
-    """The core of make_app for use from gearbox commands (other than 'serve')"""
-    return make_base_app(global_conf, full_stack=full_stack, **app_conf)
-
-
 def make_app(global_conf, full_stack=True, **app_conf):
     """
     Set up Kallithea with the settings found in the PasteDeploy configuration
@@ -47,4 +42,6 @@
     ``app_conf`` contains all the application-specific settings (those defined
     under ``[app:main]``.
     """
-    return make_app_without_logging(global_conf, full_stack=full_stack, **app_conf)
+    assert app_conf.get('sqlalchemy.url')  # must be called with a Kallithea .ini file, which for example must have this config option
+    assert global_conf.get('here') and global_conf.get('__file__')  # app config should be initialized the paste way ...
+    return make_base_app(global_conf, full_stack=full_stack, **app_conf)
--- a/kallithea/config/routing.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/config/routing.py	Sat May 02 21:20:43 2020 +0200
@@ -19,14 +19,34 @@
 refer to the routes manual at http://routes.groovie.org/docs/
 """
 
-from routes import Mapper
+import routes
 from tg import request
 
+from kallithea.lib.utils2 import safe_str
+
 
 # prefix for non repository related links needs to be prefixed with `/`
 ADMIN_PREFIX = '/_admin'
 
 
+class Mapper(routes.Mapper):
+    """
+    Subclassed Mapper with routematch patched to decode "unicode" str url to
+    *real* unicode str before applying matches and invoking controller methods.
+    """
+
+    def routematch(self, url=None, environ=None):
+        """
+        routematch that also decode url from "fake bytes" to real unicode
+        string before matching and invoking controllers.
+        """
+        # Process url like get_path_info does ... but PATH_INFO has already
+        # been retrieved from environ and is passed, so - let's just use that
+        # instead.
+        url = safe_str(url.encode('latin1'))
+        return super().routematch(url=url, environ=environ)
+
+
 def make_map(config):
     """Create, configure and return the routes Mapper"""
     rmap = Mapper(directory=config['paths']['controllers'],
@@ -86,7 +106,7 @@
     #==========================================================================
 
     # MAIN PAGE
-    rmap.connect('home', '/', controller='home', action='index')
+    rmap.connect('home', '/', controller='home')
     rmap.connect('about', '/about', controller='home', action='about')
     rmap.redirect('/favicon.ico', '/images/favicon.ico')
     rmap.connect('repo_switcher_data', '/_repos', controller='home',
@@ -106,7 +126,7 @@
         m.connect("repos", "/repos",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("repos", "/repos",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("new_repo", "/create_repository",
                   action="create_repository", conditions=dict(method=["GET"]))
         m.connect("update_repo", "/repos/{repo_name:.*?}",
@@ -121,7 +141,7 @@
         m.connect("repos_groups", "/repo_groups",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("repos_groups", "/repo_groups",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("new_repos_group", "/repo_groups/new",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
@@ -161,9 +181,9 @@
         m.connect("new_user", "/users/new",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("users", "/users",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("formatted_users", "/users.{format}",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("new_user", "/users/new",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("update_user", "/users/{id}",
@@ -216,7 +236,7 @@
         m.connect("users_groups", "/user_groups",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("users_groups", "/user_groups",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("new_users_group", "/user_groups/new",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("update_users_group", "/user_groups/{id}",
@@ -263,8 +283,7 @@
     # ADMIN DEFAULTS ROUTES
     with rmap.submapper(path_prefix=ADMIN_PREFIX,
                         controller='admin/defaults') as m:
-        m.connect('defaults', '/defaults',
-                  action="index")
+        m.connect('defaults', '/defaults')
         m.connect('defaults_update', 'defaults/{id}/update',
                   action="update", conditions=dict(method=["POST"]))
 
@@ -370,7 +389,7 @@
         m.connect("gists", "/gists",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("gists", "/gists",
-                  action="index", conditions=dict(method=["GET"]))
+                  conditions=dict(method=["GET"]))
         m.connect("new_gist", "/gists/new",
                   action="new", conditions=dict(method=["GET"]))
 
@@ -396,7 +415,7 @@
     # ADMIN MAIN PAGES
     with rmap.submapper(path_prefix=ADMIN_PREFIX,
                         controller='admin/admin') as m:
-        m.connect('admin_home', '', action='index')
+        m.connect('admin_home', '')
         m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. _-]*}',
                   action='add_repo')
     #==========================================================================
@@ -408,7 +427,7 @@
 
     # USER JOURNAL
     rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
-                 controller='journal', action='index')
+                 controller='journal')
     rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
                  controller='journal', action='journal_rss')
     rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
@@ -475,7 +494,7 @@
     #==========================================================================
     rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
                 controller='admin/repos', action='repo_creating')
-    rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check',
+    rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating',
                 controller='admin/repos', action='repo_check')
 
     rmap.connect('summary_home', '/{repo_name:.*?}',
@@ -544,13 +563,6 @@
                  controller='admin/repos', action="edit_advanced_fork",
                  conditions=dict(method=["POST"], function=check_repo))
 
-    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
-                 controller='admin/repos', action="edit_caches",
-                 conditions=dict(method=["GET"], function=check_repo))
-    rmap.connect("update_repo_caches", "/{repo_name:.*?}/settings/caches",
-                 controller='admin/repos', action="edit_caches",
-                 conditions=dict(method=["POST"], function=check_repo))
-
     rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
                  controller='admin/repos', action="edit_remote",
                  conditions=dict(method=["GET"], function=check_repo))
@@ -602,7 +614,7 @@
 
     rmap.connect('compare_home',
                  '/{repo_name:.*?}/compare',
-                 controller='compare', action='index',
+                 controller='compare',
                  conditions=dict(function=check_repo))
 
     rmap.connect('compare_url',
@@ -616,7 +628,7 @@
 
     rmap.connect('pullrequest_home',
                  '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
-                 action='index', conditions=dict(function=check_repo,
+                 conditions=dict(function=check_repo,
                                                  method=["GET"]))
 
     rmap.connect('pullrequest_repo_info',
@@ -674,7 +686,7 @@
                 controller='changelog', conditions=dict(function=check_repo))
 
     rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
-                controller='changelog', f_path=None,
+                controller='changelog',
                 conditions=dict(function=check_repo))
 
     rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
@@ -719,8 +731,8 @@
 
     rmap.connect('files_annotate_home',
                  '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
-                 controller='files', action='index', revision='tip',
-                 f_path='', annotate=True, conditions=dict(function=check_repo))
+                 controller='files', revision='tip',
+                 f_path='', annotate='1', conditions=dict(function=check_repo))
 
     rmap.connect('files_edit_home',
                  '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
--- a/kallithea/controllers/admin/admin.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/admin.py	Sat May 02 21:20:43 2020 +0200
@@ -36,7 +36,6 @@
 from whoosh.qparser.dateparse import DateParserPlugin
 from whoosh.qparser.default import QueryParser
 
-from kallithea.config.routing import url
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
 from kallithea.lib.base import BaseController, render
 from kallithea.lib.indexers import JOURNAL_SCHEMA
@@ -61,7 +60,7 @@
     if search_term:
         qp = QueryParser('repository', schema=JOURNAL_SCHEMA)
         qp.add_plugin(DateParserPlugin())
-        qry = qp.parse(unicode(search_term))
+        qry = qp.parse(search_term)
         log.debug('Filtering using parsed query %r', qry)
 
     def wildcard_handler(col, wc_term):
@@ -139,10 +138,8 @@
 
         p = safe_int(request.GET.get('page'), 1)
 
-        def url_generator(**kw):
-            return url.current(filter=c.search_term, **kw)
-
-        c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator)
+        c.users_log = Page(users_log, page=p, items_per_page=10,
+                           filter=c.search_term)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             return render('admin/admin_log.html')
--- a/kallithea/controllers/admin/auth_settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/auth_settings.py	Sat May 02 21:20:43 2020 +0200
@@ -37,7 +37,6 @@
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
 from kallithea.lib.base import BaseController, render
-from kallithea.lib.compat import formatted_json
 from kallithea.model.db import Setting
 from kallithea.model.forms import AuthSettingsForm
 from kallithea.model.meta import Session
@@ -87,7 +86,7 @@
         # we want to show , separated list of enabled plugins
         c.defaults['auth_plugins'] = ','.join(c.enabled_plugin_names)
 
-        log.debug(formatted_json(defaults))
+        log.debug('defaults: %s', defaults)
         return formencode.htmlfill.render(
             render('admin/auth/auth_settings.html'),
             defaults=c.defaults,
@@ -103,7 +102,7 @@
     def auth_settings(self):
         """POST create and store auth settings"""
         self.__load_defaults()
-        log.debug("POST Result: %s", formatted_json(dict(request.POST)))
+        log.debug("POST Result: %s", dict(request.POST))
 
         # First, parse only the plugin list (not the plugin settings).
         _auth_plugins_validator = AuthSettingsForm([]).fields['auth_plugins']
--- a/kallithea/controllers/admin/defaults.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/defaults.py	Sat May 02 21:20:43 2020 +0200
@@ -31,7 +31,6 @@
 import formencode
 from formencode import htmlfill
 from tg import request
-from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
@@ -69,7 +68,7 @@
 
         try:
             form_result = _form.to_python(dict(request.POST))
-            for k, v in form_result.iteritems():
+            for k, v in form_result.items():
                 setting = Setting.create_or_update(k, v)
             Session().commit()
             h.flash(_('Default settings updated successfully'),
--- a/kallithea/controllers/admin/gists.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/gists.py	Sat May 02 21:20:43 2020 +0200
@@ -40,7 +40,7 @@
 from kallithea.lib.auth import LoginRequired
 from kallithea.lib.base import BaseController, jsonify, render
 from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int, safe_unicode, time_to_datetime
+from kallithea.lib.utils2 import safe_int, safe_str, time_to_datetime
 from kallithea.lib.vcs.exceptions import NodeNotChangedError, VCSError
 from kallithea.model.db import Gist
 from kallithea.model.forms import GistForm
@@ -71,6 +71,11 @@
         not_default_user = not request.authuser.is_default_user
         c.show_private = request.GET.get('private') and not_default_user
         c.show_public = request.GET.get('public') and not_default_user
+        url_params = {}
+        if c.show_public:
+            url_params['public'] = 1
+        elif c.show_private:
+            url_params['private'] = 1
 
         gists = Gist().query() \
             .filter_by(is_expired=False) \
@@ -97,7 +102,8 @@
 
         c.gists = gists
         p = safe_int(request.GET.get('page'), 1)
-        c.gists_pager = Page(c.gists, page=p, items_per_page=10)
+        c.gists_pager = Page(c.gists, page=p, items_per_page=10,
+                             **url_params)
         return render('admin/gists/index.html')
 
     @LoginRequired()
@@ -176,7 +182,10 @@
             log.error(traceback.format_exc())
             raise HTTPNotFound()
         if format == 'raw':
-            content = '\n\n'.join([f.content for f in c.files if (f_path is None or safe_unicode(f.path) == f_path)])
+            content = '\n\n'.join(
+                safe_str(f.content)
+                for f in c.files if (f_path is None or f.path == f_path)
+            )
             response.content_type = 'text/plain'
             return content
         return render('admin/gists/show.html')
--- a/kallithea/controllers/admin/my_account.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/my_account.py	Sat May 02 21:20:43 2020 +0200
@@ -279,8 +279,8 @@
             Session().commit()
             SshKeyModel().write_authorized_keys()
             h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
-        except SshKeyModelException as errors:
-            h.flash(errors.message, category='error')
+        except SshKeyModelException as e:
+            h.flash(e.args[0], category='error')
         raise HTTPFound(location=url('my_account_ssh_keys'))
 
     @IfSshEnabled
@@ -291,6 +291,6 @@
             Session().commit()
             SshKeyModel().write_authorized_keys()
             h.flash(_("SSH key successfully deleted"), category='success')
-        except SshKeyModelException as errors:
-            h.flash(errors.message, category='error')
+        except SshKeyModelException as e:
+            h.flash(e.args[0], category='error')
         raise HTTPFound(location=url('my_account_ssh_keys'))
--- a/kallithea/controllers/admin/repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -25,7 +25,6 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import itertools
 import logging
 import traceback
 
@@ -37,7 +36,6 @@
 from tg.i18n import ungettext
 from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
 
-import kallithea
 from kallithea.config.routing import url
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired
@@ -93,10 +91,8 @@
         return data
 
     def _revoke_perms_on_yourself(self, form_result):
-        _up = filter(lambda u: request.authuser.username == u[0],
-                     form_result['perms_updates'])
-        _new = filter(lambda u: request.authuser.username == u[0],
-                      form_result['perms_new'])
+        _up = [u for u in form_result['perms_updates'] if request.authuser.username == u[0]]
+        _new = [u for u in form_result['perms_new'] if request.authuser.username == u[0]]
         if _new and _new[0][1] != 'group.admin' or _up and _up[0][1] != 'group.admin':
             return True
         return False
@@ -105,24 +101,20 @@
         _list = RepoGroup.query(sorted=True).all()
         group_iter = RepoGroupList(_list, perm_level='admin')
         repo_groups_data = []
-        total_records = len(group_iter)
         _tmpl_lookup = app_globals.mako_lookup
         template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 
-        repo_group_name = lambda repo_group_name, children_groups: (
-            template.get_def("repo_group_name")
-            .render(repo_group_name, children_groups, _=_, h=h, c=c)
-        )
-        repo_group_actions = lambda repo_group_id, repo_group_name, gr_count: (
-            template.get_def("repo_group_actions")
-            .render(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
-                    ungettext=ungettext)
-        )
+        def repo_group_name(repo_group_name, children_groups):
+            return template.get_def("repo_group_name") \
+                .render_unicode(repo_group_name, children_groups, _=_, h=h, c=c)
+
+        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
+            return template.get_def("repo_group_actions") \
+                .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
+                        ungettext=ungettext)
 
         for repo_gr in group_iter:
-            children_groups = map(h.safe_unicode,
-                itertools.chain((g.name for g in repo_gr.parents),
-                                (x.name for x in [repo_gr])))
+            children_groups = [g.name for g in repo_gr.parents] + [repo_gr.name]
             repo_count = repo_gr.repositories.count()
             repo_groups_data.append({
                 "raw_name": repo_gr.group_name,
--- a/kallithea/controllers/admin/repos.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/repos.py	Sat May 02 21:20:43 2020 +0200
@@ -28,6 +28,7 @@
 import logging
 import traceback
 
+import celery.result
 import formencode
 from formencode import htmlfill
 from tg import request
@@ -35,6 +36,7 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
 
+import kallithea
 from kallithea.config.routing import url
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasPermissionAny, HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous
@@ -43,7 +45,7 @@
 from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import safe_int
 from kallithea.lib.vcs import RepositoryError
-from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, User, UserFollowing
+from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, UserFollowing
 from kallithea.model.forms import RepoFieldForm, RepoForm, RepoPermsForm
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
@@ -110,17 +112,11 @@
     @NotAnonymous()
     def create(self):
         self.__load_defaults()
-        form_result = {}
         try:
             # CanWriteGroup validators checks permissions of this POST
             form_result = RepoForm(repo_groups=c.repo_groups,
                                    landing_revs=c.landing_revs_choices)() \
                             .to_python(dict(request.POST))
-
-            # create is done sometimes async on celery, db transaction
-            # management is handled there.
-            task = RepoModel().create(form_result, request.authuser.user_id)
-            task_id = task.task_id
         except formencode.Invalid as errors:
             log.info(errors)
             return htmlfill.render(
@@ -131,6 +127,11 @@
                 force_defaults=False,
                 encoding="UTF-8")
 
+        try:
+            # create is done sometimes async on celery, db transaction
+            # management is handled there.
+            task = RepoModel().create(form_result, request.authuser.user_id)
+            task_id = task.task_id
         except Exception:
             log.error(traceback.format_exc())
             msg = (_('Error creating repository %s')
@@ -181,12 +182,10 @@
         task_id = request.GET.get('task_id')
 
         if task_id and task_id not in ['None']:
-            from kallithea import CELERY_ON
-            from kallithea.lib import celerypylons
-            if CELERY_ON:
-                task = celerypylons.result.AsyncResult(task_id)
-                if task.failed():
-                    raise HTTPInternalServerError(task.traceback)
+            if kallithea.CELERY_APP:
+                task_result = celery.result.AsyncResult(task_id, app=kallithea.CELERY_APP)
+                if task_result.failed():
+                    raise HTTPInternalServerError(task_result.traceback)
 
         repo = Repository.get_by_repo_name(repo_name)
         if repo and repo.repo_state == Repository.STATE_CREATED:
@@ -406,7 +405,7 @@
     @HasRepoPermissionLevelDecorator('admin')
     def edit_advanced(self, repo_name):
         c.repo_info = self._load_repo()
-        c.default_user_id = User.get_default_user().user_id
+        c.default_user_id = kallithea.DEFAULT_USER_ID
         c.in_public_journal = UserFollowing.query() \
             .filter(UserFollowing.user_id == c.default_user_id) \
             .filter(UserFollowing.follows_repository == c.repo_info).scalar()
@@ -443,7 +442,7 @@
 
         try:
             repo_id = Repository.get_by_repo_name(repo_name).repo_id
-            user_id = User.get_default_user().user_id
+            user_id = kallithea.DEFAULT_USER_ID
             self.scm_model.toggle_following_repo(repo_id, user_id)
             h.flash(_('Updated repository visibility in public journal'),
                     category='success')
@@ -471,7 +470,7 @@
                     category='success')
         except RepositoryError as e:
             log.error(traceback.format_exc())
-            h.flash(str(e), category='error')
+            h.flash(e, category='error')
         except Exception as e:
             log.error(traceback.format_exc())
             h.flash(_('An error occurred during this operation'),
@@ -480,24 +479,6 @@
         raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
 
     @HasRepoPermissionLevelDecorator('admin')
-    def edit_caches(self, repo_name):
-        c.repo_info = self._load_repo()
-        c.active = 'caches'
-        if request.POST:
-            try:
-                ScmModel().mark_for_invalidation(repo_name)
-                Session().commit()
-                h.flash(_('Cache invalidation successful'),
-                        category='success')
-            except Exception as e:
-                log.error(traceback.format_exc())
-                h.flash(_('An error occurred during cache invalidation'),
-                        category='error')
-
-            raise HTTPFound(location=url('edit_repo_caches', repo_name=c.repo_name))
-        return render('admin/repos/repo_edit.html')
-
-    @HasRepoPermissionLevelDecorator('admin')
     def edit_remote(self, repo_name):
         c.repo_info = self._load_repo()
         c.active = 'remote'
--- a/kallithea/controllers/admin/settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/settings.py	Sat May 02 21:20:43 2020 +0200
@@ -42,7 +42,7 @@
 from kallithea.lib.celerylib import tasks
 from kallithea.lib.exceptions import HgsubversionImportError
 from kallithea.lib.utils import repo2db_mapper, set_app_settings
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs import VCSError
 from kallithea.model.db import Repository, Setting, Ui
 from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm
@@ -120,6 +120,7 @@
                 if sett.ui_active:
                     try:
                         import hgsubversion  # pragma: no cover
+                        assert hgsubversion
                     except ImportError:
                         raise HgsubversionImportError
 
@@ -168,10 +169,10 @@
                                             user=request.authuser.username,
                                             overwrite_git_hooks=overwrite_git_hooks)
             added_msg = h.HTML(', ').join(
-                h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
+                h.link_to(safe_str(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
             ) or '-'
             removed_msg = h.HTML(', ').join(
-                safe_unicode(repo_name) for repo_name in removed
+                safe_str(repo_name) for repo_name in removed
             ) or '-'
             h.flash(h.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
                     (added_msg, removed_msg), category='success')
@@ -423,7 +424,7 @@
         import kallithea
         c.ini = kallithea.CONFIG
         server_info = Setting.get_server_info()
-        for key, val in server_info.iteritems():
+        for key, val in server_info.items():
             setattr(c, key, val)
 
         return htmlfill.render(
--- a/kallithea/controllers/admin/user_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/user_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -32,19 +32,18 @@
 from formencode import htmlfill
 from sqlalchemy.orm import joinedload
 from sqlalchemy.sql.expression import func
-from tg import app_globals, config, request
+from tg import app_globals, request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound, HTTPInternalServerError
 
-import kallithea
 from kallithea.config.routing import url
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseController, render
 from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
 from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import safe_int, safe_unicode
+from kallithea.lib.utils2 import safe_int, safe_str
 from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm
 from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm
 from kallithea.model.meta import Session
@@ -61,7 +60,6 @@
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
         super(UserGroupsController, self)._before(*args, **kwargs)
-        c.available_permissions = config['available_permissions']
 
     def __load_data(self, user_group_id):
         c.group_members_obj = sorted((x.user for x in c.user_group.members),
@@ -88,20 +86,18 @@
                         .all()
         group_iter = UserGroupList(_list, perm_level='admin')
         user_groups_data = []
-        total_records = len(group_iter)
         _tmpl_lookup = app_globals.mako_lookup
         template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 
-        user_group_name = lambda user_group_id, user_group_name: (
-            template.get_def("user_group_name")
-            .render(user_group_id, user_group_name, _=_, h=h, c=c)
-        )
-        user_group_actions = lambda user_group_id, user_group_name: (
-            template.get_def("user_group_actions")
-            .render(user_group_id, user_group_name, _=_, h=h, c=c)
-        )
+        def user_group_name(user_group_id, user_group_name):
+            return template.get_def("user_group_name") \
+                .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+
+        def user_group_actions(user_group_id, user_group_name):
+            return template.get_def("user_group_actions") \
+                .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+
         for user_gr in group_iter:
-
             user_groups_data.append({
                 "raw_name": user_gr.users_group_name,
                 "group_name": user_group_name(user_gr.users_group_id,
@@ -163,7 +159,7 @@
         c.active = 'settings'
         self.__load_data(id)
 
-        available_members = [safe_unicode(x[0]) for x in c.available_members]
+        available_members = [safe_str(x[0]) for x in c.available_members]
 
         users_group_form = UserGroupForm(edit=True,
                                          old_data=c.user_group.get_dict(),
--- a/kallithea/controllers/admin/users.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/admin/users.py	Sat May 02 21:20:43 2020 +0200
@@ -31,7 +31,7 @@
 import formencode
 from formencode import htmlfill
 from sqlalchemy.sql.expression import func
-from tg import app_globals, config, request
+from tg import app_globals, request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound, HTTPNotFound
@@ -63,7 +63,6 @@
     @HasPermissionAnyDecorator('hg.admin')
     def _before(self, *args, **kwargs):
         super(UsersController, self)._before(*args, **kwargs)
-        c.available_permissions = config['available_permissions']
 
     def index(self, format='html'):
         c.users_list = User.query().order_by(User.username) \
@@ -72,19 +71,18 @@
                         .all()
 
         users_data = []
-        total_records = len(c.users_list)
         _tmpl_lookup = app_globals.mako_lookup
         template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 
         grav_tmpl = '<div class="gravatar">%s</div>'
 
-        username = lambda user_id, username: (
-                template.get_def("user_name")
-                .render(user_id, username, _=_, h=h, c=c))
+        def username(user_id, username):
+            return template.get_def("user_name") \
+                .render_unicode(user_id, username, _=_, h=h, c=c)
 
-        user_actions = lambda user_id, username: (
-                template.get_def("user_actions")
-                .render(user_id, username, _=_, h=h, c=c))
+        def user_actions(user_id, username):
+            return template.get_def("user_actions") \
+                .render_unicode(user_id, username, _=_, h=h, c=c)
 
         for user in c.users_list:
             users_data.append({
@@ -390,7 +388,7 @@
             .filter(UserIpMap.user == c.user).all()
 
         c.default_user_ip_map = UserIpMap.query() \
-            .filter(UserIpMap.user == User.get_default_user()).all()
+            .filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all()
 
         defaults = c.user.get_dict()
         return htmlfill.render(
@@ -454,8 +452,8 @@
             Session().commit()
             SshKeyModel().write_authorized_keys()
             h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
-        except SshKeyModelException as errors:
-            h.flash(errors.message, category='error')
+        except SshKeyModelException as e:
+            h.flash(e.args[0], category='error')
         raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
 
     @IfSshEnabled
@@ -468,6 +466,6 @@
             Session().commit()
             SshKeyModel().write_authorized_keys()
             h.flash(_("SSH key successfully deleted"), category='success')
-        except SshKeyModelException as errors:
-            h.flash(errors.message, category='error')
+        except SshKeyModelException as e:
+            h.flash(e.args[0], category='error')
         raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
--- a/kallithea/controllers/api/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/api/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -35,11 +35,11 @@
 from tg import Response, TGController, request, response
 from webob.exc import HTTPError, HTTPException
 
+from kallithea.lib import ext_json
 from kallithea.lib.auth import AuthUser
-from kallithea.lib.base import _get_access_path
 from kallithea.lib.base import _get_ip_addr as _get_ip
-from kallithea.lib.compat import json
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.base import get_path_info
+from kallithea.lib.utils2 import ascii_bytes
 from kallithea.model.db import User
 
 
@@ -53,7 +53,7 @@
         super(JSONRPCError, self).__init__()
 
     def __str__(self):
-        return safe_str(self.message)
+        return self.message
 
 
 class JSONRPCErrorResponse(Response, HTTPException):
@@ -121,7 +121,7 @@
         raw_body = environ['wsgi.input'].read(length)
 
         try:
-            json_body = json.loads(raw_body)
+            json_body = ext_json.loads(raw_body)
         except ValueError as e:
             # catch JSON errors Here
             raise JSONRPCErrorResponse(retid=self._req_id,
@@ -166,13 +166,13 @@
 
         # now that we have a method, add self._req_params to
         # self.kargs and dispatch control to WGIController
-        argspec = inspect.getargspec(self._func)
-        arglist = argspec[0][1:]
-        defaults = map(type, argspec[3] or [])
-        default_empty = types.NotImplementedType
+        argspec = inspect.getfullargspec(self._func)
+        arglist = argspec.args[1:]
+        argtypes = [type(arg) for arg in argspec.defaults or []]
+        default_empty = type(NotImplemented)
 
         # kw arguments required by this method
-        func_kwargs = dict(itertools.izip_longest(reversed(arglist), reversed(defaults),
+        func_kwargs = dict(itertools.zip_longest(reversed(arglist), reversed(argtypes),
                                                   fillvalue=default_empty))
 
         # This attribute will need to be first param of a method that uses
@@ -180,7 +180,7 @@
         USER_SESSION_ATTR = 'apiuser'
 
         # get our arglist and check if we provided them as args
-        for arg, default in func_kwargs.iteritems():
+        for arg, default in func_kwargs.items():
             if arg == USER_SESSION_ATTR:
                 # USER_SESSION_ATTR is something translated from API key and
                 # this is checked before so we don't need validate it
@@ -209,7 +209,7 @@
 
         log.info('IP: %s Request to %s time: %.3fs' % (
             self._get_ip_addr(environ),
-            safe_unicode(_get_access_path(environ)), time.time() - start)
+            get_path_info(environ), time.time() - start)
         )
 
         state.set_action(self._rpc_call, [])
@@ -226,28 +226,28 @@
             if isinstance(raw_response, HTTPError):
                 self._error = str(raw_response)
         except JSONRPCError as e:
-            self._error = safe_str(e)
+            self._error = str(e)
         except Exception as e:
             log.error('Encountered unhandled exception: %s',
                       traceback.format_exc(),)
             json_exc = JSONRPCError('Internal server error')
-            self._error = safe_str(json_exc)
+            self._error = str(json_exc)
 
         if self._error is not None:
             raw_response = None
 
         response = dict(id=self._req_id, result=raw_response, error=self._error)
         try:
-            return json.dumps(response)
+            return ascii_bytes(ext_json.dumps(response))
         except TypeError as e:
-            log.error('API FAILED. Error encoding response: %s', e)
-            return json.dumps(
+            log.error('API FAILED. Error encoding response for %s %s: %s\n%s', action, rpc_args, e, traceback.format_exc())
+            return ascii_bytes(ext_json.dumps(
                 dict(
                     id=self._req_id,
                     result=None,
-                    error="Error encoding response"
+                    error="Error encoding response",
                 )
-            )
+            ))
 
     def _find_method(self):
         """
--- a/kallithea/controllers/api/api.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/api/api.py	Sat May 02 21:20:43 2020 +0200
@@ -32,8 +32,8 @@
 from tg import request
 
 from kallithea.controllers.api import JSONRPCController, JSONRPCError
-from kallithea.lib.auth import (
-    AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel)
+from kallithea.lib.auth import (AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel,
+                                HasUserGroupPermissionLevel)
 from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException
 from kallithea.lib.utils import action_logger, repo2db_mapper
 from kallithea.lib.utils2 import OAttr, Optional
@@ -433,7 +433,7 @@
 
     @HasPermissionAnyDecorator('hg.admin')
     def create_user(self, username, email, password=Optional(''),
-                    firstname=Optional(u''), lastname=Optional(u''),
+                    firstname=Optional(''), lastname=Optional(''),
                     active=Optional(True), admin=Optional(False),
                     extern_type=Optional(User.DEFAULT_AUTH_TYPE),
                     extern_name=Optional('')):
@@ -686,7 +686,7 @@
         ]
 
     @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
-    def create_user_group(self, group_name, description=Optional(u''),
+    def create_user_group(self, group_name, description=Optional(''),
                           owner=Optional(OAttr('apiuser')), active=Optional(True)):
         """
         Creates new user group. This command can be executed only using api_key
@@ -1160,7 +1160,7 @@
             return _map[ret_type]
         except KeyError:
             raise JSONRPCError('ret_type must be one of %s'
-                               % (','.join(_map.keys())))
+                               % (','.join(sorted(_map))))
         except Exception:
             log.error(traceback.format_exc())
             raise JSONRPCError(
@@ -2339,7 +2339,7 @@
                                                  branch_name,
                                                  reverse, max_revisions)]
         except EmptyRepositoryError as e:
-            raise JSONRPCError(e.message)
+            raise JSONRPCError('Repository is empty')
 
     # permission check inside
     def get_changeset(self, repoid, raw_id, with_reviews=Optional(False)):
@@ -2373,7 +2373,7 @@
         return pull_request.get_api_data()
 
     # permission check inside
-    def comment_pullrequest(self, pull_request_id, comment_msg=u'', status=None, close_pr=False):
+    def comment_pullrequest(self, pull_request_id, comment_msg='', status=None, close_pr=False):
         """
         Add comment, close and change status of pull request.
         """
@@ -2400,7 +2400,7 @@
             pull_request=pull_request.pull_request_id,
             f_path=None,
             line_no=None,
-            status_change=(ChangesetStatus.get_status_lbl(status)),
+            status_change=ChangesetStatus.get_status_lbl(status),
             closing_pr=close_pr
         )
         action_logger(apiuser,
--- a/kallithea/controllers/changelog.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/changelog.py	Sat May 02 21:20:43 2020 +0200
@@ -38,8 +38,8 @@
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.graphmod import graph_data
-from kallithea.lib.page import RepoPage
-from kallithea.lib.utils2 import safe_int, safe_str
+from kallithea.lib.page import Page
+from kallithea.lib.utils2 import safe_int
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, NodeDoesNotExistError, RepositoryError
 
 
@@ -67,7 +67,7 @@
             h.flash(_('There are no changesets yet'), category='error')
         except RepositoryError as e:
             log.error(traceback.format_exc())
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
         raise HTTPBadRequest()
 
     @LoginRequired(allow_default_user=True)
@@ -111,35 +111,34 @@
                         cs = self.__get_cs(revision, repo_name)
                         collection = cs.get_file_history(f_path)
                     except RepositoryError as e:
-                        h.flash(safe_str(e), category='warning')
+                        h.flash(e, category='warning')
                         raise HTTPFound(location=h.url('changelog_home', repo_name=repo_name))
-                collection = list(reversed(collection))
             else:
                 collection = c.db_repo_scm_instance.get_changesets(start=0, end=revision,
-                                                        branch_name=branch_name)
+                                                        branch_name=branch_name, reverse=True)
             c.total_cs = len(collection)
 
-            c.cs_pagination = RepoPage(collection, page=p, item_count=c.total_cs,
-                                    items_per_page=c.size, branch=branch_name,)
+            c.cs_pagination = Page(collection, page=p, item_count=c.total_cs, items_per_page=c.size,
+                                   branch=branch_name)
 
             page_revisions = [x.raw_id for x in c.cs_pagination]
             c.cs_comments = c.db_repo.get_comments(page_revisions)
             c.cs_statuses = c.db_repo.statuses(page_revisions)
         except EmptyRepositoryError as e:
-            h.flash(safe_str(e), category='warning')
+            h.flash(e, category='warning')
             raise HTTPFound(location=url('summary_home', repo_name=c.repo_name))
         except (RepositoryError, ChangesetDoesNotExistError, Exception) as e:
             log.error(traceback.format_exc())
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
             raise HTTPFound(location=url('changelog_home', repo_name=c.repo_name))
 
         c.branch_name = branch_name
         c.branch_filters = [('', _('None'))] + \
-            [(k, k) for k in c.db_repo_scm_instance.branches.keys()]
+            [(k, k) for k in c.db_repo_scm_instance.branches]
         if c.db_repo_scm_instance.closed_branches:
             prefix = _('(closed)') + ' '
             c.branch_filters += [('-', '-')] + \
-                [(k, prefix + k) for k in c.db_repo_scm_instance.closed_branches.keys()]
+                [(k, prefix + k) for k in c.db_repo_scm_instance.closed_branches]
         revs = []
         if not f_path:
             revs = [x.revision for x in c.cs_pagination]
--- a/kallithea/controllers/changeset.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/changeset.py	Sat May 02 21:20:43 2020 +0200
@@ -25,6 +25,7 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
+import binascii
 import logging
 import traceback
 from collections import OrderedDict, defaultdict
@@ -32,7 +33,7 @@
 from tg import request, response
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound
+from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPNotFound
 
 import kallithea.lib.helpers as h
 from kallithea.lib import diffs
@@ -40,7 +41,7 @@
 from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.graphmod import graph_data
 from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import ascii_str, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
 from kallithea.model.changeset_status import ChangesetStatusModel
@@ -65,7 +66,7 @@
 
 def get_ignore_ws(fid, GET):
     ig_ws_global = GET.get('ignorews')
-    ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
+    ig_ws = [k for k in GET.getall(fid) if k.startswith('WS')]
     if ig_ws:
         try:
             return int(ig_ws[0].split(':')[-1])
@@ -108,9 +109,9 @@
 def get_line_ctx(fid, GET):
     ln_ctx_global = GET.get('context')
     if fid:
-        ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
+        ln_ctx = [k for k in GET.getall(fid) if k.startswith('C')]
     else:
-        _ln_ctx = filter(lambda k: k.startswith('C'), GET)
+        _ln_ctx = [k for k in GET if k.startswith('C')]
         ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
         if ln_ctx:
             ln_ctx = [ln_ctx]
@@ -214,7 +215,6 @@
             return {
                'location': h.url('my_pullrequests'), # or repo pr list?
             }
-            raise HTTPFound(location=h.url('my_pullrequests')) # or repo pr list?
         raise HTTPForbidden()
 
     text = request.POST.get('text', '').strip()
@@ -256,7 +256,7 @@
     Session().commit()
 
     data = {
-       'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
+       'target_id': h.safeid(request.POST.get('f_path')),
     }
     if comment is not None:
         c.comment = comment
@@ -395,6 +395,8 @@
             c.changeset = c.cs_ranges[0]
             c.parent_tmpl = ''.join(['# Parent  %s\n' % x.raw_id
                                      for x in c.changeset.parents])
+            c.changeset_graft_source_hash = ascii_str(c.changeset.extra.get(b'source', b''))
+            c.changeset_transplant_source_hash = ascii_str(binascii.hexlify(c.changeset.extra.get(b'transplant_source', b'')))
         if method == 'download':
             response.content_type = 'text/plain'
             response.content_disposition = 'attachment; filename=%s.diff' \
@@ -402,7 +404,7 @@
             return raw_diff
         elif method == 'patch':
             response.content_type = 'text/plain'
-            c.diff = safe_unicode(raw_diff)
+            c.diff = safe_str(raw_diff)
             return render('changeset/patch_changeset.html')
         elif method == 'raw':
             response.content_type = 'text/plain'
--- a/kallithea/controllers/compare.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/compare.py	Sat May 02 21:20:43 2020 +0200
@@ -30,6 +30,7 @@
 import logging
 import re
 
+import mercurial.unionrepo
 from tg import request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
@@ -42,8 +43,7 @@
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.graphmod import graph_data
-from kallithea.lib.utils2 import safe_int, safe_str
-from kallithea.lib.vcs.utils.hgcompat import unionrepo
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int
 from kallithea.model.db import Repository
 
 
@@ -97,14 +97,9 @@
         elif alias == 'hg':
             # case two independent repos
             if org_repo != other_repo:
-                try:
-                    hgrepo = unionrepo.makeunionrepository(other_repo.baseui,
-                                                           other_repo.path,
-                                                           org_repo.path)
-                except AttributeError: # makeunionrepository was introduced in Mercurial 4.8 23f2299e9e53
-                    hgrepo = unionrepo.unionrepository(other_repo.baseui,
-                                                       other_repo.path,
-                                                       org_repo.path)
+                hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui,
+                                                       safe_bytes(other_repo.path),
+                                                       safe_bytes(org_repo.path))
                 # all ancestors of other_rev will be in other_repo and
                 # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
 
@@ -112,21 +107,27 @@
             else:
                 hgrepo = other_repo._repo
 
-            ancestors = [hgrepo[ancestor].hex() for ancestor in
-                         hgrepo.revs("id(%s) & ::id(%s)", other_rev, org_rev)]
+            ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+                         hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))]
             if ancestors:
                 log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
             else:
                 log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
-                ancestors = [hgrepo[ancestor].hex() for ancestor in
-                             hgrepo.revs("heads(::id(%s) & ::id(%s))", org_rev, other_rev)] # FIXME: expensive!
+                ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+                             hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive!
 
-            other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
-                                     other_rev, org_rev, org_rev)
-            other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
-            org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
-                                   org_rev, other_rev, other_rev)
-            org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
+            other_changesets = [
+                other_repo.get_changeset(rev)
+                for rev in hgrepo.revs(
+                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+                    ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
+            ]
+            org_changesets = [
+                org_repo.get_changeset(ascii_str(hgrepo[rev].hex()))
+                for rev in hgrepo.revs(
+                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+                    ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
+            ]
 
         elif alias == 'git':
             if org_repo != other_repo:
@@ -134,15 +135,15 @@
                 from dulwich.client import SubprocessGitClient
 
                 gitrepo = Repo(org_repo.path)
-                SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo)
+                SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
 
                 gitrepo_remote = Repo(other_repo.path)
-                SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote)
+                SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)
 
                 revs = [
-                    x.commit.id
-                    for x in gitrepo_remote.get_walker(include=[other_rev],
-                                                       exclude=[org_rev])
+                    ascii_str(x.commit.id)
+                    for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
+                                                       exclude=[ascii_bytes(org_rev)])
                 ]
                 other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
                 if other_changesets:
@@ -155,13 +156,13 @@
                 gitrepo_remote.close()
 
             else:
-                so, se = org_repo.run_git_command(
+                so = org_repo.run_git_command(
                     ['log', '--reverse', '--pretty=format:%H',
                      '-s', '%s..%s' % (org_rev, other_rev)]
                 )
                 other_changesets = [org_repo.get_changeset(cs)
                               for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
-                so, se = org_repo.run_git_command(
+                so = org_repo.run_git_command(
                     ['merge-base', org_rev, other_rev]
                 )
                 ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
@@ -277,7 +278,7 @@
                                       ignore_whitespace=ignore_whitespace,
                                       context=line_context)
 
-        diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
+        diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
         c.limited_diff = diff_processor.limited_diff
         c.file_diff_data = []
         c.lines_added = 0
--- a/kallithea/controllers/error.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/error.py	Sat May 02 21:20:43 2020 +0200
@@ -25,7 +25,7 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import cgi
+import html
 import logging
 
 from tg import config, expose, request
@@ -64,8 +64,7 @@
             'protocol': e.get('wsgi.url_scheme'),
             'host': e.get('HTTP_HOST'), }
         if resp:
-            c.error_message = cgi.escape(request.GET.get('code',
-                                                         str(resp.status)))
+            c.error_message = html.escape(request.GET.get('code', str(resp.status)))
             c.error_explanation = self.get_error_explanation(resp.status_int)
         else:
             c.error_message = _('No response')
--- a/kallithea/controllers/feed.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/feed.py	Sat May 02 21:20:43 2020 +0200
@@ -32,23 +32,19 @@
 from tg import response
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 
 from kallithea import CONFIG
+from kallithea.lib import feeds
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController
 from kallithea.lib.diffs import DiffProcessor
-from kallithea.lib.utils2 import safe_int, safe_unicode, str2bool
+from kallithea.lib.utils2 import safe_int, safe_str, str2bool
 
 
 log = logging.getLogger(__name__)
 
 
-language = 'en-us'
-ttl = "5"
-
-
 class FeedController(BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
@@ -98,64 +94,41 @@
         desc_msg.extend(changes)
         if str2bool(CONFIG.get('rss_include_diff', False)):
             desc_msg.append('\n\n')
-            desc_msg.append(raw_diff)
+            desc_msg.append(safe_str(raw_diff))
         desc_msg.append('</pre>')
-        return map(safe_unicode, desc_msg)
+        return desc_msg
 
-    def atom(self, repo_name):
-        """Produce an atom-1.0 feed via feedgenerator module"""
+    def _feed(self, repo_name, feeder):
+        """Produce a simple feed"""
 
-        @cache_region('long_term', '_get_feed_from_cache')
+        @cache_region('long_term_file', '_get_feed_from_cache')
         def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
-            feed = Atom1Feed(
+            header = dict(
                 title=_('%s %s feed') % (c.site_name, repo_name),
                 link=h.canonical_url('summary_home', repo_name=repo_name),
                 description=_('Changes on %s repository') % repo_name,
-                language=language,
-                ttl=ttl
             )
 
             rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+            entries=[]
             for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
-                feed.add_item(title=self._get_title(cs),
-                              link=h.canonical_url('changeset_home', repo_name=repo_name,
-                                       revision=cs.raw_id),
-                              author_name=cs.author,
-                              description=''.join(self.__get_desc(cs)),
-                              pubdate=cs.date,
-                              )
+                entries.append(dict(
+                    title=self._get_title(cs),
+                    link=h.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
+                    author_email=cs.author_email,
+                    author_name=cs.author_name,
+                    description=''.join(self.__get_desc(cs)),
+                    pubdate=cs.date,
+                ))
+            return feeder.render(header, entries)
 
-            response.content_type = feed.mime_type
-            return feed.writeString('utf-8')
+        response.content_type = feeder.content_type
+        return _get_feed_from_cache(repo_name, feeder.__name__)
 
-        kind = 'ATOM'
-        return _get_feed_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))
+    def atom(self, repo_name):
+        """Produce a simple atom-1.0 feed"""
+        return self._feed(repo_name, feeds.AtomFeed)
 
     def rss(self, repo_name):
-        """Produce an rss2 feed via feedgenerator module"""
-
-        @cache_region('long_term', '_get_feed_from_cache')
-        def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
-            feed = Rss201rev2Feed(
-                title=_('%s %s feed') % (c.site_name, repo_name),
-                link=h.canonical_url('summary_home', repo_name=repo_name),
-                description=_('Changes on %s repository') % repo_name,
-                language=language,
-                ttl=ttl
-            )
-
-            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
-            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
-                feed.add_item(title=self._get_title(cs),
-                              link=h.canonical_url('changeset_home', repo_name=repo_name,
-                                       revision=cs.raw_id),
-                              author_name=cs.author,
-                              description=''.join(self.__get_desc(cs)),
-                              pubdate=cs.date,
-                             )
-
-            response.content_type = feed.mime_type
-            return feed.writeString('utf-8')
-
-        kind = 'RSS'
-        return _get_feed_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))
+        """Produce a simple rss2 feed"""
+        return self._feed(repo_name, feeds.RssFeed)
--- a/kallithea/controllers/files.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/files.py	Sat May 02 21:20:43 2020 +0200
@@ -49,10 +49,10 @@
 from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, str2bool
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import (
-    ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError, NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
+from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
+                                          NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 from kallithea.lib.vcs.nodes import FileNode
-from kallithea.model.db import Repository
+from kallithea.model import db
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
 
@@ -90,7 +90,7 @@
             h.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
             raise HTTPNotFound()
 
     def __get_filenode(self, cs, path):
@@ -110,7 +110,7 @@
             h.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
             raise HTTPNotFound()
 
         return file_node
@@ -163,7 +163,7 @@
                 c.load_full_history = False
                 # determine if we're on branch head
                 _branches = c.db_repo_scm_instance.branches
-                c.on_branch_head = revision in _branches.keys() + _branches.values()
+                c.on_branch_head = revision in _branches or revision in _branches.values()
                 _hist = []
                 c.file_history = []
                 if c.load_full_history:
@@ -175,7 +175,7 @@
             else:
                 c.authors = c.file_history = []
         except RepositoryError as e:
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
             raise HTTPNotFound()
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
@@ -232,8 +232,8 @@
         cs = self.__get_cs(revision)
         file_node = self.__get_filenode(cs, f_path)
 
-        response.content_disposition = 'attachment; filename=%s' % \
-            safe_str(f_path.split(Repository.url_sep())[-1])
+        response.content_disposition = \
+            'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
 
         response.content_type = file_node.mimetype
         return file_node.content
@@ -277,8 +277,7 @@
                 mimetype, dispo = 'text/plain', 'inline'
 
         if dispo == 'attachment':
-            dispo = 'attachment; filename=%s' % \
-                        safe_str(f_path.split(os.sep)[-1])
+            dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
 
         response.content_disposition = dispo
         response.content_type = mimetype
@@ -292,7 +291,7 @@
         # create multiple heads via file editing
         _branches = repo.scm_instance.branches
         # check if revision is a branch name or branch hash
-        if revision not in _branches.keys() + _branches.values():
+        if revision not in _branches and revision not in _branches.values():
             h.flash(_('You can only delete files with revision '
                       'being a valid branch'), category='warning')
             raise HTTPFound(location=h.url('files_home',
@@ -346,7 +345,7 @@
         # create multiple heads via file editing
         _branches = repo.scm_instance.branches
         # check if revision is a branch name or branch hash
-        if revision not in _branches.keys() + _branches.values():
+        if revision not in _branches and revision not in _branches.values():
             h.flash(_('You can only edit files with revision '
                       'being a valid branch'), category='warning')
             raise HTTPFound(location=h.url('files_home',
@@ -365,8 +364,7 @@
         c.f_path = f_path
 
         if r_post:
-
-            old_content = c.file.content
+            old_content = safe_str(c.file.content)
             sl = old_content.splitlines(1)
             first_line = sl[0] if sl else ''
             # modes:  0 - Unix, 1 - Mac, 2 - DOS
@@ -509,8 +507,7 @@
 
         from kallithea import CONFIG
         rev_name = cs.raw_id[:12]
-        archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
-                                    safe_str(rev_name), ext)
+        archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
 
         archive_path = None
         cached_archive_path = None
--- a/kallithea/controllers/forks.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/forks.py	Sat May 02 21:20:43 2020 +0200
@@ -35,13 +35,14 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
+import kallithea
 import kallithea.lib.helpers as h
 from kallithea.config.routing import url
 from kallithea.lib.auth import HasPermissionAny, HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int
-from kallithea.model.db import Repository, Ui, User, UserFollowing
+from kallithea.model.db import Repository, Ui, UserFollowing
 from kallithea.model.forms import RepoForkForm
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import AvailableRepoGroupChoices, ScmModel
@@ -76,7 +77,7 @@
             h.not_mapped_error(c.repo_name)
             raise HTTPFound(location=url('repos'))
 
-        c.default_user_id = User.get_default_user().user_id
+        c.default_user_id = kallithea.DEFAULT_USER_ID
         c.in_public_journal = UserFollowing.query() \
             .filter(UserFollowing.user_id == c.default_user_id) \
             .filter(UserFollowing.follows_repository == c.repo_info).scalar()
--- a/kallithea/controllers/home.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/home.py	Sat May 02 21:20:43 2020 +0200
@@ -37,7 +37,7 @@
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseController, jsonify, render
-from kallithea.lib.utils import conditional_cache
+from kallithea.lib.utils2 import safe_str
 from kallithea.model.db import RepoGroup, Repository, User, UserGroup
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import UserGroupList
@@ -67,9 +67,7 @@
     @LoginRequired(allow_default_user=True)
     @jsonify
     def repo_switcher_data(self):
-        # wrapper for conditional cache
-        def _c():
-            log.debug('generating switcher repo/groups list')
+        if request.is_xhr:
             all_repos = Repository.query(sorted=True).all()
             repo_iter = self.scm_model.get_repos(all_repos)
             all_groups = RepoGroup.query(sorted=True).all()
@@ -96,17 +94,16 @@
                     ],
                    }]
 
+            for res_dict in res:
+                for child in (res_dict['children']):
+                    child['obj'].pop('_changeset_cache', None)  # bytes cannot be encoded in json ... but this value isn't relevant on client side at all ...
+
             data = {
                 'more': False,
                 'results': res,
             }
             return data
 
-        if request.is_xhr:
-            condition = False
-            compute = conditional_cache('short_term', 'cache_desc',
-                                        condition=condition, func=_c)
-            return compute()
         else:
             raise HTTPBadRequest()
 
@@ -120,25 +117,25 @@
         if _branches:
             res.append({
                 'text': _('Branch'),
-                'children': [{'id': rev, 'text': name, 'type': 'branch'} for name, rev in _branches]
+                'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'branch'} for name, rev in _branches]
             })
         _closed_branches = repo.closed_branches.items()
         if _closed_branches:
             res.append({
                 'text': _('Closed Branches'),
-                'children': [{'id': rev, 'text': name, 'type': 'closed-branch'} for name, rev in _closed_branches]
+                'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'closed-branch'} for name, rev in _closed_branches]
             })
         _tags = repo.tags.items()
         if _tags:
             res.append({
                 'text': _('Tag'),
-                'children': [{'id': rev, 'text': name, 'type': 'tag'} for name, rev in _tags]
+                'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'tag'} for name, rev in _tags]
             })
         _bookmarks = repo.bookmarks.items()
         if _bookmarks:
             res.append({
                 'text': _('Bookmark'),
-                'children': [{'id': rev, 'text': name, 'type': 'book'} for name, rev in _bookmarks]
+                'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'book'} for name, rev in _bookmarks]
             })
         data = {
             'more': False,
--- a/kallithea/controllers/journal.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/journal.py	Sat May 02 21:20:43 2020 +0200
@@ -23,7 +23,6 @@
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
-
 """
 
 import logging
@@ -35,12 +34,11 @@
 from tg import request, response
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 from webob.exc import HTTPBadRequest
 
 import kallithea.lib.helpers as h
-from kallithea.config.routing import url
 from kallithea.controllers.admin.admin import _journal_filter
+from kallithea.lib import feeds
 from kallithea.lib.auth import LoginRequired
 from kallithea.lib.base import BaseController, render
 from kallithea.lib.page import Page
@@ -105,22 +103,17 @@
 
         return journal
 
-    def _atom_feed(self, repos, public=True):
+    def _feed(self, repos, feeder, link, desc):
+        response.content_type = feeder.content_type
         journal = self._get_journal_data(repos)
-        if public:
-            _link = h.canonical_url('public_journal_atom')
-            _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
-                                  'atom feed')
-        else:
-            _link = h.canonical_url('journal_atom')
-            _desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
 
-        feed = Atom1Feed(title=_desc,
-                         link=_link,
-                         description=_desc,
-                         language=language,
-                         ttl=ttl)
+        header = dict(
+            title=desc,
+            link=link,
+            description=desc,
+        )
 
+        entries=[]
         for entry in journal[:feed_nr]:
             user = entry.user
             if user is None:
@@ -131,63 +124,43 @@
             action, action_extra, ico = h.action_parser(entry, feed=True)
             title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
-            desc = action_extra()
             _url = None
             if entry.repository is not None:
                 _url = h.canonical_url('changelog_home',
                            repo_name=entry.repository.repo_name)
 
-            feed.add_item(title=title,
-                          pubdate=entry.action_date,
-                          link=_url or h.canonical_url(''),
-                          author_email=user.email,
-                          author_name=user.full_contact,
-                          description=desc)
+            entries.append(dict(
+                title=title,
+                pubdate=entry.action_date,
+                link=_url or h.canonical_url(''),
+                author_email=user.email,
+                author_name=user.full_name_or_username,
+                description=action_extra(),
+            ))
+
+        return feeder.render(header, entries)
 
-        response.content_type = feed.mime_type
-        return feed.writeString('utf-8')
+    def _atom_feed(self, repos, public=True):
+        if public:
+            link = h.canonical_url('public_journal_atom')
+            desc = '%s %s %s' % (c.site_name, _('Public Journal'),
+                                  'atom feed')
+        else:
+            link = h.canonical_url('journal_atom')
+            desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
+
+        return self._feed(repos, feeds.AtomFeed, link, desc)
 
     def _rss_feed(self, repos, public=True):
-        journal = self._get_journal_data(repos)
         if public:
-            _link = h.canonical_url('public_journal_atom')
-            _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
+            link = h.canonical_url('public_journal_atom')
+            desc = '%s %s %s' % (c.site_name, _('Public Journal'),
                                   'rss feed')
         else:
-            _link = h.canonical_url('journal_atom')
-            _desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
-
-        feed = Rss201rev2Feed(title=_desc,
-                         link=_link,
-                         description=_desc,
-                         language=language,
-                         ttl=ttl)
+            link = h.canonical_url('journal_atom')
+            desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
 
-        for entry in journal[:feed_nr]:
-            user = entry.user
-            if user is None:
-                # fix deleted users
-                user = AttributeDict({'short_contact': entry.username,
-                                      'email': '',
-                                      'full_contact': ''})
-            action, action_extra, ico = h.action_parser(entry, feed=True)
-            title = "%s - %s %s" % (user.short_contact, action(),
-                                    entry.repository.repo_name)
-            desc = action_extra()
-            _url = None
-            if entry.repository is not None:
-                _url = h.canonical_url('changelog_home',
-                           repo_name=entry.repository.repo_name)
-
-            feed.add_item(title=title,
-                          pubdate=entry.action_date,
-                          link=_url or h.canonical_url(''),
-                          author_email=user.email,
-                          author_name=user.full_contact,
-                          description=desc)
-
-        response.content_type = feed.mime_type
-        return feed.writeString('utf-8')
+        return self._feed(repos, feeds.RssFeed, link, desc)
 
     @LoginRequired()
     def index(self):
@@ -201,10 +174,8 @@
 
         journal = self._get_journal_data(c.following)
 
-        def url_generator(**kw):
-            return url.current(filter=c.search_term, **kw)
-
-        c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
+        c.journal_pager = Page(journal, page=p, items_per_page=20,
+                               filter=c.search_term)
         c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
@@ -221,9 +192,7 @@
 
     @LoginRequired()
     def journal_atom(self):
-        """
-        Produce an atom-1.0 feed via feedgenerator module
-        """
+        """Produce a simple atom-1.0 feed"""
         following = UserFollowing.query() \
             .filter(UserFollowing.user_id == request.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
@@ -232,9 +201,7 @@
 
     @LoginRequired()
     def journal_rss(self):
-        """
-        Produce an rss feed via feedgenerator module
-        """
+        """Produce a simple rss2 feed"""
         following = UserFollowing.query() \
             .filter(UserFollowing.user_id == request.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
@@ -290,9 +257,7 @@
 
     @LoginRequired(allow_default_user=True)
     def public_journal_atom(self):
-        """
-        Produce an atom-1.0 feed via feedgenerator module
-        """
+        """Produce a simple atom-1.0 feed"""
         c.following = UserFollowing.query() \
             .filter(UserFollowing.user_id == request.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
@@ -302,9 +267,7 @@
 
     @LoginRequired(allow_default_user=True)
     def public_journal_rss(self):
-        """
-        Produce an rss2 feed via feedgenerator module
-        """
+        """Produce a simple rss2 feed"""
         c.following = UserFollowing.query() \
             .filter(UserFollowing.user_id == request.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
--- a/kallithea/controllers/login.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/login.py	Sat May 02 21:20:43 2020 +0200
@@ -41,7 +41,6 @@
 from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator
 from kallithea.lib.base import BaseController, log_in_user, render
 from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils2 import safe_str
 from kallithea.model.db import Setting, User
 from kallithea.model.forms import LoginForm, PasswordResetConfirmationForm, PasswordResetRequestForm, RegisterForm
 from kallithea.model.meta import Session
@@ -68,7 +67,7 @@
         return _re.match(came_from) is not None
 
     def index(self):
-        c.came_from = safe_str(request.GET.get('came_from', ''))
+        c.came_from = request.GET.get('came_from', '')
         if c.came_from:
             if not self._validate_came_from(c.came_from):
                 log.error('Invalid came_from (not server-relative): %r', c.came_from)
@@ -80,10 +79,11 @@
             # import Login Form validator class
             login_form = LoginForm()()
             try:
+                # login_form will check username/password using ValidAuth and report failure to the user
                 c.form_result = login_form.to_python(dict(request.POST))
-                # form checks for username/password, now we're authenticated
                 username = c.form_result['username']
-                user = User.get_by_username_or_email(username, case_insensitive=True)
+                user = User.get_by_username_or_email(username)
+                assert user is not None  # the same user get just passed in the form validation
             except formencode.Invalid as errors:
                 defaults = errors.value
                 # remove password from filling in form again
@@ -102,9 +102,11 @@
                 # Exception itself
                 h.flash(e, 'error')
             else:
+                # login_form already validated the password - now set the session cookie accordingly
                 auth_user = log_in_user(user, c.form_result['remember'], is_external_auth=False, ip_addr=request.ip_addr)
-                # TODO: handle auth_user is None as failed authentication?
-                raise HTTPFound(location=c.came_from)
+                if auth_user:
+                    raise HTTPFound(location=c.came_from)
+                h.flash(_('Authentication failed.'), 'error')
         else:
             # redirect if already logged in
             if not request.authuser.is_anonymous:
--- a/kallithea/controllers/pullrequests.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/pullrequests.py	Sat May 02 21:20:43 2020 +0200
@@ -29,6 +29,7 @@
 import traceback
 
 import formencode
+import mercurial.unionrepo
 from tg import request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
@@ -42,10 +43,8 @@
 from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.graphmod import graph_data
 from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int
+from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError
-from kallithea.lib.vcs.utils import safe_str
-from kallithea.lib.vcs.utils.hgcompat import unionrepo
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.comment import ChangesetCommentsModel
 from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User
@@ -83,22 +82,15 @@
         # list named branches that has been merged to this named branch - it should probably merge back
         peers = []
 
-        if rev:
-            rev = safe_str(rev)
-
-        if branch:
-            branch = safe_str(branch)
-
         if branch_rev:
-            branch_rev = safe_str(branch_rev)
             # a revset not restricting to merge() would be better
             # (especially because it would get the branch point)
             # ... but is currently too expensive
             # including branches of children could be nice too
             peerbranches = set()
             for i in repo._repo.revs(
-                "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
-                branch_rev, branch_rev
+                b"sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
+                ascii_bytes(branch_rev), ascii_bytes(branch_rev),
             ):
                 for abranch in repo.get_changeset(i).branches:
                     if abranch not in peerbranches:
@@ -111,7 +103,7 @@
         tipbranch = None
 
         branches = []
-        for abranch, branchrev in repo.branches.iteritems():
+        for abranch, branchrev in repo.branches.items():
             n = 'branch:%s:%s' % (abranch, branchrev)
             desc = abranch
             if branchrev == tiprev:
@@ -135,14 +127,14 @@
                 log.debug('branch %r not found in %s', branch, repo)
 
         bookmarks = []
-        for bookmark, bookmarkrev in repo.bookmarks.iteritems():
+        for bookmark, bookmarkrev in repo.bookmarks.items():
             n = 'book:%s:%s' % (bookmark, bookmarkrev)
             bookmarks.append((n, bookmark))
             if rev == bookmarkrev:
                 selected = n
 
         tags = []
-        for tag, tagrev in repo.tags.iteritems():
+        for tag, tagrev in repo.tags.items():
             if tag == 'tip':
                 continue
             n = 'tag:%s:%s' % (tag, tagrev)
@@ -173,7 +165,7 @@
                 if 'master' in repo.branches:
                     selected = 'branch:master:%s' % repo.branches['master']
                 else:
-                    k, v = repo.branches.items()[0]
+                    k, v = list(repo.branches.items())[0]
                     selected = 'branch:%s:%s' % (k, v)
 
         groups = [(specials, _("Special")),
@@ -201,6 +193,11 @@
     def show_all(self, repo_name):
         c.from_ = request.GET.get('from_') or ''
         c.closed = request.GET.get('closed') or ''
+        url_params = {}
+        if c.from_:
+            url_params['from_'] = 1
+        if c.closed:
+            url_params['closed'] = 1
         p = safe_int(request.GET.get('page'), 1)
 
         q = PullRequest.query(include_closed=c.closed, sorted=True)
@@ -210,7 +207,7 @@
             q = q.filter_by(other_repo=c.db_repo)
         c.pull_requests = q.all()
 
-        c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100)
+        c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100, **url_params)
 
         return render('/pullrequests/pullrequest_show_all.html')
 
@@ -335,7 +332,7 @@
         try:
             cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers)
         except CreatePullRequestAction.ValidationError as e:
-            h.flash(str(e), category='error', logf=log.error)
+            h.flash(e, category='error', logf=log.error)
             raise HTTPNotFound
 
         try:
@@ -358,7 +355,7 @@
         try:
             cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers)
         except CreatePullRequestAction.ValidationError as e:
-            h.flash(str(e), category='error', logf=log.error)
+            h.flash(e, category='error', logf=log.error)
             raise HTTPNotFound
 
         try:
@@ -531,14 +528,9 @@
                             # Note: org_scm_instance.path must come first so all
                             # valid revision numbers are 100% org_scm compatible
                             # - both for avail_revs and for revset results
-                            try:
-                                hgrepo = unionrepo.makeunionrepository(org_scm_instance.baseui,
-                                                                       org_scm_instance.path,
-                                                                       other_scm_instance.path)
-                            except AttributeError: # makeunionrepository was introduced in Mercurial 4.8 23f2299e9e53
-                                hgrepo = unionrepo.unionrepository(org_scm_instance.baseui,
-                                                                   org_scm_instance.path,
-                                                                   other_scm_instance.path)
+                            hgrepo = mercurial.unionrepo.makeunionrepository(org_scm_instance.baseui,
+                                                                   safe_bytes(org_scm_instance.path),
+                                                                   safe_bytes(other_scm_instance.path))
                         else:
                             hgrepo = org_scm_instance._repo
                         show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s',
@@ -588,11 +580,11 @@
         log.debug('running diff between %s and %s in %s',
                   c.a_rev, c.cs_rev, org_scm_instance.path)
         try:
-            raw_diff = diffs.get_diff(org_scm_instance, rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
+            raw_diff = diffs.get_diff(org_scm_instance, rev1=c.a_rev, rev2=c.cs_rev,
                                       ignore_whitespace=ignore_whitespace, context=line_context)
         except ChangesetDoesNotExistError:
-            raw_diff = _("The diff can't be shown - the PR revisions could not be found.")
-        diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
+            raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found."))
+        diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
         c.limited_diff = diff_processor.limited_diff
         c.file_diff_data = []
         c.lines_added = 0
--- a/kallithea/controllers/search.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/search.py	Sat May 02 21:20:43 2020 +0200
@@ -27,12 +27,10 @@
 
 import logging
 import traceback
-import urllib
 
 from tg import config, request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webhelpers2.html.tools import update_params
 from whoosh.index import EmptyIndexError, exists_in, open_dir
 from whoosh.qparser import QueryParser, QueryParserError
 from whoosh.query import Phrase, Prefix
@@ -41,7 +39,7 @@
 from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA, WhooshResultWrapper
 from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int, safe_str
+from kallithea.lib.utils2 import safe_int
 from kallithea.model.repo import RepoModel
 
 
@@ -96,9 +94,9 @@
                 if c.repo_name:
                     # use "repository_rawname:" instead of "repository:"
                     # for case-sensitive matching
-                    cur_query = u'repository_rawname:%s %s' % (c.repo_name, cur_query)
+                    cur_query = 'repository_rawname:%s %s' % (c.repo_name, cur_query)
                 try:
-                    query = qp.parse(unicode(cur_query))
+                    query = qp.parse(cur_query)
                     # extract words for highlight
                     if isinstance(query, Phrase):
                         highlight_items.update(query.words)
@@ -119,9 +117,6 @@
                         res_ln, results.runtime
                     )
 
-                    def url_generator(**kw):
-                        q = urllib.quote(safe_str(c.cur_query))
-                        return update_params("?q=%s&type=%s" % (q, safe_str(c.cur_type)), **kw)
                     repo_location = RepoModel().repos_path
                     c.formated_results = Page(
                         WhooshResultWrapper(search_type, searcher, matcher,
@@ -129,7 +124,8 @@
                         page=p,
                         item_count=res_ln,
                         items_per_page=10,
-                        url=url_generator
+                        type=c.cur_type,
+                        q=c.cur_query,
                     )
 
                 except QueryParserError:
--- a/kallithea/controllers/summary.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/controllers/summary.py	Sat May 02 21:20:43 2020 +0200
@@ -38,14 +38,15 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest
 
+import kallithea.lib.helpers as h
 from kallithea.config.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP
+from kallithea.lib import ext_json
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.celerylib.tasks import get_commits_stats
-from kallithea.lib.compat import json
 from kallithea.lib.markup_renderer import MarkupRenderer
-from kallithea.lib.page import RepoPage
-from kallithea.lib.utils2 import safe_int
+from kallithea.lib.page import Page
+from kallithea.lib.utils2 import safe_int, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError
 from kallithea.lib.vcs.nodes import FileNode
@@ -65,7 +66,7 @@
         repo_name = db_repo.repo_name
         log.debug('Looking for README file')
 
-        @cache_region('long_term', '_get_readme_from_cache')
+        @cache_region('long_term_file', '_get_readme_from_cache')
         def _get_readme_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
             readme_data = None
             readme_file = None
@@ -83,7 +84,7 @@
                         readme_file = f
                         log.debug('Found README file `%s` rendering...',
                                   readme_file)
-                        readme_data = renderer.render(readme.content,
+                        readme_data = renderer.render(safe_str(readme.content),
                                                       filename=f)
                         break
                     except NodeDoesNotExistError:
@@ -104,8 +105,12 @@
     def index(self, repo_name):
         p = safe_int(request.GET.get('page'), 1)
         size = safe_int(request.GET.get('size'), 10)
-        collection = c.db_repo_scm_instance
-        c.cs_pagination = RepoPage(collection, page=p, items_per_page=size)
+        try:
+            collection = c.db_repo_scm_instance.get_changesets(reverse=True)
+        except EmptyRepositoryError as e:
+            h.flash(e, category='warning')
+            collection = []
+        c.cs_pagination = Page(collection, page=p, items_per_page=size)
         page_revisions = [x.raw_id for x in list(c.cs_pagination)]
         c.cs_comments = c.db_repo.get_comments(page_revisions)
         c.cs_statuses = c.db_repo.statuses(page_revisions)
@@ -133,17 +138,13 @@
         c.stats_percentage = 0
 
         if stats and stats.languages:
-            c.no_data = False is c.db_repo.enable_statistics
-            lang_stats_d = json.loads(stats.languages)
-
+            lang_stats_d = ext_json.loads(stats.languages)
             lang_stats = [(x, {"count": y,
                                "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')})
                           for x, y in lang_stats_d.items()]
             lang_stats.sort(key=lambda k: (-k[1]['count'], k[0]))
-
             c.trending_languages = lang_stats[:10]
         else:
-            c.no_data = True
             c.trending_languages = []
 
         c.enable_downloads = c.db_repo.enable_downloads
@@ -171,7 +172,7 @@
             c.no_data_msg = _('Statistics are disabled for this repository')
 
         td = date.today() + timedelta(days=1)
-        td_1m = td - timedelta(days=calendar.mdays[td.month])
+        td_1m = td - timedelta(days=calendar.monthrange(td.year, td.month)[1])
         td_1y = td - timedelta(days=365)
 
         ts_min_m = mktime(td_1m.timetuple())
@@ -185,18 +186,16 @@
             .scalar()
         c.stats_percentage = 0
         if stats and stats.languages:
-            c.no_data = False is c.db_repo.enable_statistics
-            lang_stats_d = json.loads(stats.languages)
-            c.commit_data = json.loads(stats.commit_activity)
-            c.overview_data = json.loads(stats.commit_activity_combined)
+            c.commit_data = ext_json.loads(stats.commit_activity)
+            c.overview_data = ext_json.loads(stats.commit_activity_combined)
 
-            lang_stats = ((x, {"count": y,
-                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
-                          for x, y in lang_stats_d.items())
+            lang_stats_d = ext_json.loads(stats.languages)
+            lang_stats = [(x, {"count": y,
+                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')})
+                          for x, y in lang_stats_d.items()]
+            lang_stats.sort(key=lambda k: (-k[1]['count'], k[0]))
+            c.trending_languages = lang_stats[:10]
 
-            c.trending_languages = (
-                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
-            )
             last_rev = stats.stat_on_revision + 1
             c.repo_last_rev = c.db_repo_scm_instance.count() \
                 if c.db_repo_scm_instance.revisions else 0
@@ -208,8 +207,7 @@
         else:
             c.commit_data = {}
             c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]])
-            c.trending_languages = {}
-            c.no_data = True
+            c.trending_languages = []
 
         recurse_limit = 500  # don't recurse more than 500 times when parsing
         get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
--- a/kallithea/front-end/package-lock.json	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/front-end/package-lock.json	Sat May 02 21:20:43 2020 +0200
@@ -3,18 +3,49 @@
   "requires": true,
   "lockfileVersion": 1,
   "dependencies": {
+    "@babel/code-frame": {
+      "version": "7.8.3",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz",
+      "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==",
+      "dev": true,
+      "requires": {
+        "@babel/highlight": "^7.8.3"
+      }
+    },
+    "@babel/highlight": {
+      "version": "7.8.3",
+      "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz",
+      "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==",
+      "dev": true,
+      "requires": {
+        "chalk": "^2.0.0",
+        "esutils": "^2.0.2",
+        "js-tokens": "^4.0.0"
+      }
+    },
     "abbrev": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
       "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
       "dev": true
     },
+    "acorn": {
+      "version": "7.1.0",
+      "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz",
+      "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==",
+      "dev": true
+    },
+    "acorn-jsx": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz",
+      "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==",
+      "dev": true
+    },
     "ajv": {
       "version": "6.10.2",
       "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
       "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
       "dev": true,
-      "optional": true,
       "requires": {
         "fast-deep-equal": "^2.0.1",
         "fast-json-stable-stringify": "^2.0.0",
@@ -28,6 +59,21 @@
       "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=",
       "dev": true
     },
+    "ansi-escapes": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz",
+      "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==",
+      "dev": true,
+      "requires": {
+        "type-fest": "^0.8.1"
+      }
+    },
+    "ansi-regex": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
+      "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
+      "dev": true
+    },
     "ansi-styles": {
       "version": "3.2.1",
       "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
@@ -37,6 +83,15 @@
         "color-convert": "^1.9.0"
       }
     },
+    "argparse": {
+      "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+      "dev": true,
+      "requires": {
+        "sprintf-js": "~1.0.2"
+      }
+    },
     "array-find-index": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz",
@@ -66,6 +121,12 @@
       "dev": true,
       "optional": true
     },
+    "astral-regex": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz",
+      "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==",
+      "dev": true
+    },
     "asynckit": {
       "version": "0.4.0",
       "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@@ -123,6 +184,12 @@
         "concat-map": "0.0.1"
       }
     },
+    "callsites": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+      "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+      "dev": true
+    },
     "caseless": {
       "version": "0.12.0",
       "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
@@ -141,6 +208,12 @@
         "supports-color": "^5.3.0"
       }
     },
+    "chardet": {
+      "version": "0.7.0",
+      "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz",
+      "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==",
+      "dev": true
+    },
     "clean-css": {
       "version": "3.4.28",
       "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-3.4.28.tgz",
@@ -162,6 +235,21 @@
         }
       }
     },
+    "cli-cursor": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz",
+      "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==",
+      "dev": true,
+      "requires": {
+        "restore-cursor": "^3.1.0"
+      }
+    },
+    "cli-width": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz",
+      "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=",
+      "dev": true
+    },
     "clone": {
       "version": "2.1.2",
       "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
@@ -220,6 +308,19 @@
       "dev": true,
       "optional": true
     },
+    "cross-spawn": {
+      "version": "6.0.5",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz",
+      "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==",
+      "dev": true,
+      "requires": {
+        "nice-try": "^1.0.4",
+        "path-key": "^2.0.1",
+        "semver": "^5.5.0",
+        "shebang-command": "^1.2.0",
+        "which": "^1.2.9"
+      }
+    },
     "dashdash": {
       "version": "1.14.1",
       "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
@@ -262,6 +363,12 @@
       "integrity": "sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI=",
       "dev": true
     },
+    "deep-is": {
+      "version": "0.1.3",
+      "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
+      "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=",
+      "dev": true
+    },
     "delayed-stream": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
@@ -279,6 +386,64 @@
         "wrappy": "1"
       }
     },
+    "doctrine": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+      "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+      "dev": true,
+      "requires": {
+        "esutils": "^2.0.2"
+      }
+    },
+    "dom-serializer": {
+      "version": "0.2.2",
+      "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz",
+      "integrity": "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==",
+      "dev": true,
+      "requires": {
+        "domelementtype": "^2.0.1",
+        "entities": "^2.0.0"
+      },
+      "dependencies": {
+        "domelementtype": {
+          "version": "2.0.1",
+          "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.0.1.tgz",
+          "integrity": "sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ==",
+          "dev": true
+        },
+        "entities": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/entities/-/entities-2.0.0.tgz",
+          "integrity": "sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==",
+          "dev": true
+        }
+      }
+    },
+    "domelementtype": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz",
+      "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==",
+      "dev": true
+    },
+    "domhandler": {
+      "version": "2.4.2",
+      "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz",
+      "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==",
+      "dev": true,
+      "requires": {
+        "domelementtype": "1"
+      }
+    },
+    "domutils": {
+      "version": "1.7.0",
+      "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz",
+      "integrity": "sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==",
+      "dev": true,
+      "requires": {
+        "dom-serializer": "0",
+        "domelementtype": "1"
+      }
+    },
     "ecc-jsbn": {
       "version": "0.1.2",
       "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
@@ -290,6 +455,18 @@
         "safer-buffer": "^2.1.0"
       }
     },
+    "emoji-regex": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+      "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+      "dev": true
+    },
+    "entities": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz",
+      "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==",
+      "dev": true
+    },
     "errno": {
       "version": "0.1.7",
       "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz",
@@ -306,6 +483,149 @@
       "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
       "dev": true
     },
+    "eslint": {
+      "version": "6.8.0",
+      "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz",
+      "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "^7.0.0",
+        "ajv": "^6.10.0",
+        "chalk": "^2.1.0",
+        "cross-spawn": "^6.0.5",
+        "debug": "^4.0.1",
+        "doctrine": "^3.0.0",
+        "eslint-scope": "^5.0.0",
+        "eslint-utils": "^1.4.3",
+        "eslint-visitor-keys": "^1.1.0",
+        "espree": "^6.1.2",
+        "esquery": "^1.0.1",
+        "esutils": "^2.0.2",
+        "file-entry-cache": "^5.0.1",
+        "functional-red-black-tree": "^1.0.1",
+        "glob-parent": "^5.0.0",
+        "globals": "^12.1.0",
+        "ignore": "^4.0.6",
+        "import-fresh": "^3.0.0",
+        "imurmurhash": "^0.1.4",
+        "inquirer": "^7.0.0",
+        "is-glob": "^4.0.0",
+        "js-yaml": "^3.13.1",
+        "json-stable-stringify-without-jsonify": "^1.0.1",
+        "levn": "^0.3.0",
+        "lodash": "^4.17.14",
+        "minimatch": "^3.0.4",
+        "mkdirp": "^0.5.1",
+        "natural-compare": "^1.4.0",
+        "optionator": "^0.8.3",
+        "progress": "^2.0.0",
+        "regexpp": "^2.0.1",
+        "semver": "^6.1.2",
+        "strip-ansi": "^5.2.0",
+        "strip-json-comments": "^3.0.1",
+        "table": "^5.2.3",
+        "text-table": "^0.2.0",
+        "v8-compile-cache": "^2.0.3"
+      },
+      "dependencies": {
+        "debug": {
+          "version": "4.1.1",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
+          "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
+          "dev": true,
+          "requires": {
+            "ms": "^2.1.1"
+          }
+        },
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "eslint-plugin-html": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-html/-/eslint-plugin-html-6.0.0.tgz",
+      "integrity": "sha512-PQcGippOHS+HTbQCStmH5MY1BF2MaU8qW/+Mvo/8xTa/ioeMXdSP+IiaBw2+nh0KEMfYQKuTz1Zo+vHynjwhbg==",
+      "dev": true,
+      "requires": {
+        "htmlparser2": "^3.10.1"
+      }
+    },
+    "eslint-scope": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz",
+      "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==",
+      "dev": true,
+      "requires": {
+        "esrecurse": "^4.1.0",
+        "estraverse": "^4.1.1"
+      }
+    },
+    "eslint-utils": {
+      "version": "1.4.3",
+      "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz",
+      "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==",
+      "dev": true,
+      "requires": {
+        "eslint-visitor-keys": "^1.1.0"
+      }
+    },
+    "eslint-visitor-keys": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz",
+      "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==",
+      "dev": true
+    },
+    "espree": {
+      "version": "6.1.2",
+      "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz",
+      "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==",
+      "dev": true,
+      "requires": {
+        "acorn": "^7.1.0",
+        "acorn-jsx": "^5.1.0",
+        "eslint-visitor-keys": "^1.1.0"
+      }
+    },
+    "esprima": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+      "dev": true
+    },
+    "esquery": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz",
+      "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==",
+      "dev": true,
+      "requires": {
+        "estraverse": "^4.0.0"
+      }
+    },
+    "esrecurse": {
+      "version": "4.2.1",
+      "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz",
+      "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==",
+      "dev": true,
+      "requires": {
+        "estraverse": "^4.1.0"
+      }
+    },
+    "estraverse": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+      "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+      "dev": true
+    },
+    "esutils": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+      "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+      "dev": true
+    },
     "extend": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@@ -313,6 +633,17 @@
       "dev": true,
       "optional": true
     },
+    "external-editor": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz",
+      "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==",
+      "dev": true,
+      "requires": {
+        "chardet": "^0.7.0",
+        "iconv-lite": "^0.4.24",
+        "tmp": "^0.0.33"
+      }
+    },
     "extsprintf": {
       "version": "1.3.0",
       "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
@@ -324,15 +655,54 @@
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
       "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=",
-      "dev": true,
-      "optional": true
+      "dev": true
     },
     "fast-json-stable-stringify": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
       "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=",
+      "dev": true
+    },
+    "fast-levenshtein": {
+      "version": "2.0.6",
+      "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+      "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
+      "dev": true
+    },
+    "figures": {
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
+      "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==",
       "dev": true,
-      "optional": true
+      "requires": {
+        "escape-string-regexp": "^1.0.5"
+      }
+    },
+    "file-entry-cache": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz",
+      "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==",
+      "dev": true,
+      "requires": {
+        "flat-cache": "^2.0.1"
+      }
+    },
+    "flat-cache": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz",
+      "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==",
+      "dev": true,
+      "requires": {
+        "flatted": "^2.0.0",
+        "rimraf": "2.6.3",
+        "write": "1.0.3"
+      }
+    },
+    "flatted": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz",
+      "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==",
+      "dev": true
     },
     "forever-agent": {
       "version": "0.6.1",
@@ -359,6 +729,12 @@
       "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
       "dev": true
     },
+    "functional-red-black-tree": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
+      "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
+      "dev": true
+    },
     "getpass": {
       "version": "0.1.7",
       "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
@@ -383,6 +759,24 @@
         "path-is-absolute": "^1.0.0"
       }
     },
+    "glob-parent": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz",
+      "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==",
+      "dev": true,
+      "requires": {
+        "is-glob": "^4.0.1"
+      }
+    },
+    "globals": {
+      "version": "12.3.0",
+      "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz",
+      "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==",
+      "dev": true,
+      "requires": {
+        "type-fest": "^0.8.1"
+      }
+    },
     "graceful-fs": {
       "version": "4.2.3",
       "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
@@ -425,6 +819,20 @@
       "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==",
       "dev": true
     },
+    "htmlparser2": {
+      "version": "3.10.1",
+      "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz",
+      "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==",
+      "dev": true,
+      "requires": {
+        "domelementtype": "^1.3.1",
+        "domhandler": "^2.3.0",
+        "domutils": "^1.5.1",
+        "entities": "^1.1.1",
+        "inherits": "^2.0.1",
+        "readable-stream": "^3.1.1"
+      }
+    },
     "http-signature": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
@@ -437,6 +845,21 @@
         "sshpk": "^1.7.0"
       }
     },
+    "iconv-lite": {
+      "version": "0.4.24",
+      "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+      "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+      "dev": true,
+      "requires": {
+        "safer-buffer": ">= 2.1.2 < 3"
+      }
+    },
+    "ignore": {
+      "version": "4.0.6",
+      "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
+      "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==",
+      "dev": true
+    },
     "image-size": {
       "version": "0.5.5",
       "resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz",
@@ -444,6 +867,22 @@
       "dev": true,
       "optional": true
     },
+    "import-fresh": {
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz",
+      "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==",
+      "dev": true,
+      "requires": {
+        "parent-module": "^1.0.0",
+        "resolve-from": "^4.0.0"
+      }
+    },
+    "imurmurhash": {
+      "version": "0.1.4",
+      "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+      "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=",
+      "dev": true
+    },
     "inflight": {
       "version": "1.0.6",
       "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
@@ -460,6 +899,54 @@
       "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
       "dev": true
     },
+    "inquirer": {
+      "version": "7.0.4",
+      "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz",
+      "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==",
+      "dev": true,
+      "requires": {
+        "ansi-escapes": "^4.2.1",
+        "chalk": "^2.4.2",
+        "cli-cursor": "^3.1.0",
+        "cli-width": "^2.0.0",
+        "external-editor": "^3.0.3",
+        "figures": "^3.0.0",
+        "lodash": "^4.17.15",
+        "mute-stream": "0.0.8",
+        "run-async": "^2.2.0",
+        "rxjs": "^6.5.3",
+        "string-width": "^4.1.0",
+        "strip-ansi": "^5.1.0",
+        "through": "^2.3.6"
+      }
+    },
+    "is-extglob": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+      "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+      "dev": true
+    },
+    "is-fullwidth-code-point": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+      "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+      "dev": true
+    },
+    "is-glob": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz",
+      "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==",
+      "dev": true,
+      "requires": {
+        "is-extglob": "^2.1.1"
+      }
+    },
+    "is-promise": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz",
+      "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=",
+      "dev": true
+    },
     "is-typedarray": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
@@ -467,6 +954,12 @@
       "dev": true,
       "optional": true
     },
+    "isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+      "dev": true
+    },
     "isstream": {
       "version": "0.1.2",
       "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
@@ -489,6 +982,22 @@
       "resolved": "https://registry.npmjs.org/jquery.flot/-/jquery.flot-0.8.3.tgz",
       "integrity": "sha512-/tEE8J5NjwvStHDaCHkvTJpD7wDS4hE1OEL8xEmhgQfUe0gLUem923PIceNez1mz4yBNx6Hjv7pJcowLNd+nbg=="
     },
+    "js-tokens": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+      "dev": true
+    },
+    "js-yaml": {
+      "version": "3.13.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
+      "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
+      "dev": true,
+      "requires": {
+        "argparse": "^1.0.7",
+        "esprima": "^4.0.0"
+      }
+    },
     "jsbn": {
       "version": "0.1.1",
       "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -513,8 +1022,13 @@
       "version": "0.4.1",
       "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
       "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
-      "dev": true,
-      "optional": true
+      "dev": true
+    },
+    "json-stable-stringify-without-jsonify": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+      "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=",
+      "dev": true
     },
     "json-stringify-safe": {
       "version": "5.0.1",
@@ -562,6 +1076,16 @@
         "clean-css": "^3.0.1"
       }
     },
+    "levn": {
+      "version": "0.3.0",
+      "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
+      "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=",
+      "dev": true,
+      "requires": {
+        "prelude-ls": "~1.1.2",
+        "type-check": "~0.3.2"
+      }
+    },
     "license-checker": {
       "version": "25.0.1",
       "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz",
@@ -580,6 +1104,12 @@
         "treeify": "^1.1.0"
       }
     },
+    "lodash": {
+      "version": "4.17.15",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
+      "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==",
+      "dev": true
+    },
     "mime": {
       "version": "1.6.0",
       "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
@@ -604,6 +1134,12 @@
         "mime-db": "1.40.0"
       }
     },
+    "mimic-fn": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+      "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+      "dev": true
+    },
     "minimatch": {
       "version": "3.0.4",
       "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
@@ -634,6 +1170,24 @@
       "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
       "dev": true
     },
+    "mute-stream": {
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",
+      "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==",
+      "dev": true
+    },
+    "natural-compare": {
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+      "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=",
+      "dev": true
+    },
+    "nice-try": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz",
+      "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==",
+      "dev": true
+    },
     "nopt": {
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",
@@ -672,6 +1226,29 @@
         "wrappy": "1"
       }
     },
+    "onetime": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz",
+      "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==",
+      "dev": true,
+      "requires": {
+        "mimic-fn": "^2.1.0"
+      }
+    },
+    "optionator": {
+      "version": "0.8.3",
+      "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
+      "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==",
+      "dev": true,
+      "requires": {
+        "deep-is": "~0.1.3",
+        "fast-levenshtein": "~2.0.6",
+        "levn": "~0.3.0",
+        "prelude-ls": "~1.1.2",
+        "type-check": "~0.3.2",
+        "word-wrap": "~1.2.3"
+      }
+    },
     "os-homedir": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
@@ -694,12 +1271,27 @@
         "os-tmpdir": "^1.0.0"
       }
     },
+    "parent-module": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+      "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+      "dev": true,
+      "requires": {
+        "callsites": "^3.0.0"
+      }
+    },
     "path-is-absolute": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
       "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
       "dev": true
     },
+    "path-key": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
+      "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=",
+      "dev": true
+    },
     "path-parse": {
       "version": "1.0.6",
       "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
@@ -713,6 +1305,18 @@
       "dev": true,
       "optional": true
     },
+    "prelude-ls": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
+      "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=",
+      "dev": true
+    },
+    "progress": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
+      "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
+      "dev": true
+    },
     "promise": {
       "version": "7.3.1",
       "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
@@ -741,8 +1345,7 @@
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
       "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
-      "dev": true,
-      "optional": true
+      "dev": true
     },
     "qs": {
       "version": "6.5.2",
@@ -779,6 +1382,17 @@
         "slash": "^1.0.0"
       }
     },
+    "readable-stream": {
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+      "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+      "dev": true,
+      "requires": {
+        "inherits": "^2.0.3",
+        "string_decoder": "^1.1.1",
+        "util-deprecate": "^1.0.1"
+      }
+    },
     "readdir-scoped-modules": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz",
@@ -791,6 +1405,12 @@
         "once": "^1.3.0"
       }
     },
+    "regexpp": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz",
+      "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==",
+      "dev": true
+    },
     "request": {
       "version": "2.88.0",
       "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz",
@@ -829,19 +1449,60 @@
         "path-parse": "^1.0.6"
       }
     },
+    "resolve-from": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+      "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+      "dev": true
+    },
+    "restore-cursor": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz",
+      "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==",
+      "dev": true,
+      "requires": {
+        "onetime": "^5.1.0",
+        "signal-exit": "^3.0.2"
+      }
+    },
+    "rimraf": {
+      "version": "2.6.3",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz",
+      "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==",
+      "dev": true,
+      "requires": {
+        "glob": "^7.1.3"
+      }
+    },
+    "run-async": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
+      "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
+      "dev": true,
+      "requires": {
+        "is-promise": "^2.1.0"
+      }
+    },
+    "rxjs": {
+      "version": "6.5.4",
+      "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz",
+      "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==",
+      "dev": true,
+      "requires": {
+        "tslib": "^1.9.0"
+      }
+    },
     "safe-buffer": {
       "version": "5.2.0",
       "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
       "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==",
-      "dev": true,
-      "optional": true
+      "dev": true
     },
     "safer-buffer": {
       "version": "2.1.2",
       "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
       "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
-      "dev": true,
-      "optional": true
+      "dev": true
     },
     "select2": {
       "version": "3.5.1",
@@ -859,12 +1520,52 @@
       "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
       "dev": true
     },
+    "shebang-command": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz",
+      "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=",
+      "dev": true,
+      "requires": {
+        "shebang-regex": "^1.0.0"
+      }
+    },
+    "shebang-regex": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz",
+      "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=",
+      "dev": true
+    },
+    "signal-exit": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
+      "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=",
+      "dev": true
+    },
     "slash": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
       "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=",
       "dev": true
     },
+    "slice-ansi": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz",
+      "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==",
+      "dev": true,
+      "requires": {
+        "ansi-styles": "^3.2.0",
+        "astral-regex": "^1.0.0",
+        "is-fullwidth-code-point": "^2.0.0"
+      },
+      "dependencies": {
+        "is-fullwidth-code-point": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+          "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
+          "dev": true
+        }
+      }
+    },
     "slide": {
       "version": "1.1.6",
       "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz",
@@ -938,6 +1639,12 @@
         "spdx-ranges": "^2.0.0"
       }
     },
+    "sprintf-js": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+      "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=",
+      "dev": true
+    },
     "sshpk": {
       "version": "1.16.1",
       "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
@@ -956,6 +1663,60 @@
         "tweetnacl": "~0.14.0"
       }
     },
+    "string-width": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
+      "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==",
+      "dev": true,
+      "requires": {
+        "emoji-regex": "^8.0.0",
+        "is-fullwidth-code-point": "^3.0.0",
+        "strip-ansi": "^6.0.0"
+      },
+      "dependencies": {
+        "strip-ansi": {
+          "version": "6.0.0",
+          "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
+          "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+          "dev": true,
+          "requires": {
+            "ansi-regex": "^5.0.0"
+          }
+        }
+      }
+    },
+    "string_decoder": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
+      "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
+      "dev": true,
+      "requires": {
+        "safe-buffer": "~5.2.0"
+      }
+    },
+    "strip-ansi": {
+      "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz",
+      "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==",
+      "dev": true,
+      "requires": {
+        "ansi-regex": "^4.1.0"
+      },
+      "dependencies": {
+        "ansi-regex": {
+          "version": "4.1.0",
+          "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz",
+          "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==",
+          "dev": true
+        }
+      }
+    },
+    "strip-json-comments": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz",
+      "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==",
+      "dev": true
+    },
     "supports-color": {
       "version": "5.5.0",
       "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
@@ -965,6 +1726,64 @@
         "has-flag": "^3.0.0"
       }
     },
+    "table": {
+      "version": "5.4.6",
+      "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz",
+      "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==",
+      "dev": true,
+      "requires": {
+        "ajv": "^6.10.2",
+        "lodash": "^4.17.14",
+        "slice-ansi": "^2.1.0",
+        "string-width": "^3.0.0"
+      },
+      "dependencies": {
+        "emoji-regex": {
+          "version": "7.0.3",
+          "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz",
+          "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==",
+          "dev": true
+        },
+        "is-fullwidth-code-point": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+          "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
+          "dev": true
+        },
+        "string-width": {
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz",
+          "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==",
+          "dev": true,
+          "requires": {
+            "emoji-regex": "^7.0.1",
+            "is-fullwidth-code-point": "^2.0.0",
+            "strip-ansi": "^5.1.0"
+          }
+        }
+      }
+    },
+    "text-table": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+      "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=",
+      "dev": true
+    },
+    "through": {
+      "version": "2.3.8",
+      "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+      "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
+      "dev": true
+    },
+    "tmp": {
+      "version": "0.0.33",
+      "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
+      "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
+      "dev": true,
+      "requires": {
+        "os-tmpdir": "~1.0.2"
+      }
+    },
     "tough-cookie": {
       "version": "2.4.3",
       "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
@@ -991,6 +1810,12 @@
       "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==",
       "dev": true
     },
+    "tslib": {
+      "version": "1.11.0",
+      "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.0.tgz",
+      "integrity": "sha512-BmndXUtiTn/VDDrJzQE7Mm22Ix3PxgLltW9bSNLoeCY31gnG2OPx0QqJnuc9oMIKioYrz487i6K9o4Pdn0j+Kg==",
+      "dev": true
+    },
     "tunnel-agent": {
       "version": "0.6.0",
       "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
@@ -1008,16 +1833,36 @@
       "dev": true,
       "optional": true
     },
+    "type-check": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
+      "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=",
+      "dev": true,
+      "requires": {
+        "prelude-ls": "~1.1.2"
+      }
+    },
+    "type-fest": {
+      "version": "0.8.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
+      "dev": true
+    },
     "uri-js": {
       "version": "4.2.2",
       "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
       "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
       "dev": true,
-      "optional": true,
       "requires": {
         "punycode": "^2.1.0"
       }
     },
+    "util-deprecate": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+      "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
+      "dev": true
+    },
     "util-extend": {
       "version": "1.0.3",
       "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz",
@@ -1031,6 +1876,12 @@
       "dev": true,
       "optional": true
     },
+    "v8-compile-cache": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz",
+      "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==",
+      "dev": true
+    },
     "validate-npm-package-license": {
       "version": "3.0.4",
       "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
@@ -1053,11 +1904,35 @@
         "extsprintf": "^1.2.0"
       }
     },
+    "which": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
+      "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
+      "dev": true,
+      "requires": {
+        "isexe": "^2.0.0"
+      }
+    },
+    "word-wrap": {
+      "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
+      "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+      "dev": true
+    },
     "wrappy": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
       "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
       "dev": true
+    },
+    "write": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz",
+      "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==",
+      "dev": true,
+      "requires": {
+        "mkdirp": "^0.5.1"
+      }
     }
   }
 }
--- a/kallithea/front-end/package.json	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/front-end/package.json	Sat May 02 21:20:43 2020 +0200
@@ -14,6 +14,8 @@
     "select2-bootstrap-css": "1.4.6"
   },
   "devDependencies": {
+    "eslint": "6.8.0",
+    "eslint-plugin-html": "6.0.0",
     "less": "3.10.3",
     "less-plugin-clean-css": "1.5.1",
     "license-checker": "25.0.1"
--- a/kallithea/front-end/style.less	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/front-end/style.less	Sat May 02 21:20:43 2020 +0200
@@ -937,8 +937,8 @@
   background-color: @kallithea-theme-main-color;
   border: 0;
 }
-#content #context-pages .follow .show-following,
-#content #context-pages .following .show-follow {
+#content .follow .show-following,
+#content .following .show-follow {
   display: none;
 }
 
--- a/kallithea/i18n/how_to	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/i18n/how_to	Sat May 02 21:20:43 2020 +0200
@@ -55,9 +55,9 @@
 
 First update the translation strings::
 
-    python2 setup.py extract_messages
+    python3 setup.py extract_messages
 
-Then regenerate the translation files. This could either be done with `python2
+Then regenerate the translation files. This could either be done with `python3
 setup.py update_catalog` or with `msgmerge` from the `gettext` package. As
 Weblate is also touching these translation files, it is preferred to use the
 same tools (`msgmerge`) and settings as Weblate to minimize the diff::
@@ -73,11 +73,11 @@
 In the prepared development environment, run the following to ensure
 all translation strings are extracted and up-to-date::
 
-    python2 setup.py extract_messages
+    python3 setup.py extract_messages
 
 Create new language by executing following command::
 
-    python2 setup.py init_catalog -l <new_language_code>
+    python3 setup.py init_catalog -l <new_language_code>
 
 This creates a new translation under directory `kallithea/i18n/<new_language_code>`
 based on the translation template file, `kallithea/i18n/kallithea.pot`.
@@ -90,7 +90,7 @@
 
 Finally, compile the translations::
 
-    python2 setup.py compile_catalog -l <new_language_code>
+    python3 setup.py compile_catalog -l <new_language_code>
 
 
 Manually updating translations
@@ -98,11 +98,11 @@
 
 Extract the latest versions of strings for translation by running::
 
-    python2 setup.py extract_messages
+    python3 setup.py extract_messages
 
 Update the PO file by doing::
 
-    python2 setup.py update_catalog -l <new_language_code>
+    python3 setup.py update_catalog -l <new_language_code>
 
 Edit the newly updated translation file. Repeat all steps after the
 `init_catalog` step from the 'new translation' instructions above.
--- a/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po	Sat May 02 21:20:43 2020 +0200
@@ -206,9 +206,6 @@
 msgid "Changeset %s not found"
 msgstr "Changeset %s werd niet gevonden"
 
-msgid "SSH key %r not found"
-msgstr "SSH key %r werd niet gevonden"
-
 msgid "Add repos"
 msgstr "Repositories toevoegen"
 
--- a/kallithea/i18n/pl/LC_MESSAGES/kallithea.po	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/i18n/pl/LC_MESSAGES/kallithea.po	Sat May 02 21:20:43 2020 +0200
@@ -36,6 +36,9 @@
 msgid "No response"
 msgstr "Brak odpowiedzi"
 
+msgid "Unknown error"
+msgstr "Nieznany błąd"
+
 msgid ""
 "The request could not be understood by the server due to malformed syntax."
 msgstr ""
@@ -125,6 +128,9 @@
 msgid "An error occurred during repository forking %s"
 msgstr "Wystąpił błąd podczas rozgałęzienia %s repozytorium"
 
+msgid "Groups"
+msgstr "Grupy"
+
 msgid "Repositories"
 msgstr "Repozytoria"
 
@@ -155,6 +161,9 @@
 msgid "Invalid password reset token"
 msgstr "Nieprawidłowy token resetowania hasła"
 
+msgid "Successfully updated password"
+msgstr "Pomyślnie zaktualizowano hasło"
+
 msgid "%s (closed)"
 msgstr "%s (zamknięty)"
 
@@ -247,6 +256,9 @@
 msgid "Error occurred during update of user %s"
 msgstr "wystąpił błąd podczas aktualizacji użytkownika %s"
 
+msgid "Error occurred during update of user password"
+msgstr "Wystąpił błąd w trakcie aktualizacji hasła użytkownika"
+
 msgid "Added email %s to user"
 msgstr "Dodano e-mail %s do użytkownika"
 
--- a/kallithea/i18n/ru/LC_MESSAGES/kallithea.po	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/i18n/ru/LC_MESSAGES/kallithea.po	Sat May 02 21:20:43 2020 +0200
@@ -12,7 +12,7 @@
 "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
 
 msgid "There are no changesets yet"
-msgstr "Ещё не было изменений"
+msgstr "Наборы изменений отсутствуют"
 
 msgid "None"
 msgstr "Ничего"
@@ -29,9 +29,30 @@
 msgid "Increase diff context to %(num)s lines"
 msgstr "Увеличить контекст до %(num)s строк"
 
+msgid "No permission to change status"
+msgstr "Недостаточно привилегий для изменения статуса"
+
+msgid "Successfully deleted pull request %s"
+msgstr "Pull-запрос %s успешно удалён"
+
 msgid "Such revision does not exist for this repository"
 msgstr "Нет такой ревизии в этом репозитории"
 
+msgid "Could not find other repository %s"
+msgstr "Не найден другой репозиторий %s"
+
+msgid "Cannot compare repositories of different types"
+msgstr "Невозможно сравнивать репозитории различных типов"
+
+msgid "Cannot show empty diff"
+msgstr "Отсутствуют изменения для отображения"
+
+msgid "No ancestor found for merge diff"
+msgstr "Не найдено предка для слияния"
+
+msgid "Multiple merge ancestors found for merge compare"
+msgstr "Найдено несколько предков для сравнения слияния"
+
 msgid "Cannot compare repositories without using common ancestor"
 msgstr "Невозможно сравнивать репозитории без общего предка"
 
@@ -64,7 +85,9 @@
 msgstr "%s выполнил коммит в %s"
 
 msgid "Changeset was too big and was cut off..."
-msgstr "Изменения оказались слишком большими и были вырезаны..."
+msgstr ""
+"Список изменений оказался слишком большим для отображения и был "
+"сокращён..."
 
 msgid "%s %s feed"
 msgstr "Лента новостей %s %s"
@@ -75,6 +98,9 @@
 msgid "Click here to add new file"
 msgstr "Нажмите чтобы добавить новый файл"
 
+msgid "There are no files yet."
+msgstr "Нет файлов."
+
 msgid "%s at %s"
 msgstr "%s (%s)"
 
@@ -182,6 +208,9 @@
 msgid "Successfully updated password"
 msgstr "Пароль обновлён"
 
+msgid "Invalid reviewer \"%s\" specified"
+msgstr "Некорректно задан ревьювер «%s»"
+
 msgid "%s (closed)"
 msgstr "%s (закрыта)"
 
@@ -192,7 +221,7 @@
 msgstr "Специальный"
 
 msgid "Peer branches"
-msgstr "Ветки участника"
+msgstr "Ветви участника"
 
 msgid "Bookmarks"
 msgstr "Закладки"
@@ -204,7 +233,16 @@
 msgstr "Произошла ошибка при создании pull-запроса"
 
 msgid "Successfully opened new pull request"
-msgstr "Pull-запрос создан успешно"
+msgstr "Pull-запрос успешно открыт"
+
+msgid "New pull request iteration created"
+msgstr "Создана новая итерация pull-запросов"
+
+msgid "Meanwhile, the following reviewers have been added: %s"
+msgstr "В то же время, добавлены следующие ревьюверы: %s"
+
+msgid "Meanwhile, the following reviewers have been removed: %s"
+msgstr "В то же время, удалены следующие ревьюверы: %s"
 
 msgid "No description"
 msgstr "Нет описания"
@@ -215,18 +253,44 @@
 msgid "Successfully deleted pull request"
 msgstr "Pull-запрос успешно удалён"
 
+msgid "Revision %s not found in %s"
+msgstr "Ревизия %s не найдена в %s"
+
+msgid "Error: changesets not found when displaying pull request from %s."
+msgstr "Ошибка: не найдены изменения при отображении pull-запроса от %s."
+
 msgid "This pull request has already been merged to %s."
 msgstr "Этот pull-запрос уже принят на ветку %s."
 
 msgid "This pull request has been closed and can not be updated."
 msgstr "Этот pull-запрос был закрыт и не может быть обновлён."
 
+msgid "The following additional changes are available on %s:"
+msgstr "Следующие дополнительные изменения доступны на %s:"
+
+msgid "No additional changesets found for iterating on this pull request."
+msgstr "Нет дополнительных изменений для итерации в этом pull-запросе."
+
 msgid "Note: Branch %s has another head: %s."
 msgstr "Внимание: Ветка %s имеет ещё одну верхушку: %s."
 
+msgid "Git pull requests don't support iterating yet."
+msgstr "Pull-запросы git пока не поддерживают итерации."
+
+msgid ""
+"Error: some changesets not found when displaying pull request from %s."
+msgstr ""
+"Ошибка: не найдены некоторые изменения при отображении pull-запроса от %s."
+
+msgid "The diff can't be shown - the PR revisions could not be found."
+msgstr "Невозможно отобразить различия — не найдены ревизии PR."
+
 msgid "Invalid search query. Try quoting it."
 msgstr "Недопустимый поисковый запрос. Попробуйте заключить его в кавычки."
 
+msgid "The server has no search index."
+msgstr "На сервере отсутствует поисковый индекс."
+
 msgid "An error occurred during search operation."
 msgstr "Произошла ошибка при выполнении этого поиска."
 
@@ -243,11 +307,14 @@
 msgstr "произошла ошибка при обновлении настроек авторизации"
 
 msgid "Default settings updated successfully"
-msgstr "Стандартные настройки успешно обновлены"
+msgstr "Настройки по умолчанию успешно обновлены"
 
 msgid "Error occurred during update of defaults"
 msgstr "Произошла ошибка при обновлении стандартных настроек"
 
+msgid "Forever"
+msgstr "Не ограничено"
+
 msgid "5 minutes"
 msgstr "5 минут"
 
@@ -272,6 +339,9 @@
 msgid "Unmodified"
 msgstr "Неизменный"
 
+msgid "Successfully updated gist content"
+msgstr "Содержимое gist-записи обновлено"
+
 msgid "Successfully updated gist data"
 msgstr "Данные gist-записи обновлены"
 
@@ -310,6 +380,12 @@
 msgid "API key successfully deleted"
 msgstr "API-ключ успешно удалён"
 
+msgid "SSH key %s successfully added"
+msgstr "Ключ SSH %s успешно добавлен"
+
+msgid "SSH key successfully deleted"
+msgstr "Ключ SSH успешно удалён"
+
 msgid "Read"
 msgstr "Чтение"
 
@@ -383,7 +459,7 @@
 msgstr "Репозиторий %s создан из %s"
 
 msgid "Forked repository %s as %s"
-msgstr "Сделан форк(копия) репозитория %s на %s"
+msgstr "Создан форк репозитория %s с именем %s"
 
 msgid "Created repository %s"
 msgstr "Репозиторий %s создан"
@@ -404,7 +480,7 @@
 msgstr "Репозиторий %s удалён"
 
 msgid "Cannot delete repository %s which still has forks"
-msgstr "Невозможно удалить %s, у него всё ещё есть форки"
+msgstr "Невозможно удалить репозиторий %s, поскольку существуют его форки"
 
 msgid "An error occurred during deletion of %s"
 msgstr "Произошла ошибка во время удаления %s"
@@ -412,11 +488,17 @@
 msgid "Repository permissions updated"
 msgstr "Привилегии репозитория обновлены"
 
+msgid "Field validation error: %s"
+msgstr "Ошибка валидации поля: %s"
+
+msgid "An error occurred during creation of field: %r"
+msgstr "Произошла ошибка при создании поля: %r"
+
 msgid "An error occurred during removal of field"
 msgstr "Произошла ошибка при удалении поля"
 
 msgid "-- Not a fork --"
-msgstr "-- Не форк --"
+msgstr "-- Не является форком --"
 
 msgid "Updated repository visibility in public journal"
 msgstr "Видимость репозитория в публичном журнале обновлена"
@@ -425,10 +507,10 @@
 msgstr "Произошла ошибка при установке репозитария в общедоступный журнал"
 
 msgid "Nothing"
-msgstr "Ничего"
+msgstr "Отсутствуют"
 
 msgid "Marked repository %s as fork of %s"
-msgstr "Репозиторий %s отмечен как форк %s"
+msgstr "Репозиторий %s отмечен как форк от %s"
 
 msgid "An error occurred during this operation"
 msgstr "Произошла ошибка при выполнении операции"
@@ -464,6 +546,9 @@
 msgid "Repositories successfully rescanned. Added: %s. Removed: %s."
 msgstr "Репозитории успешно пересканированы, добавлено: %s, удалено: %s."
 
+msgid "Invalidated %s repositories"
+msgstr "Сброшена валидация для %s репозиториев"
+
 msgid "Updated application settings"
 msgstr "Обновленные параметры настройки приложения"
 
@@ -479,6 +564,14 @@
 msgid "Send email task created"
 msgstr "Задача отправки Email создана"
 
+msgid "Hook already exists"
+msgstr "Хук уже существует"
+
+msgid "Builtin hooks are read-only. Please use another hook name."
+msgstr ""
+"Встроенные хуки предназначены только для чтения. Пожалуйста, используйте "
+"другое имя."
+
 msgid "Added new hook"
 msgstr "Добавлена новая ловушка"
 
@@ -489,7 +582,7 @@
 msgstr "произошла ошибка при создании хука"
 
 msgid "Whoosh reindex task scheduled"
-msgstr "Запланирована переиндексация базы Whoosh"
+msgstr "Переиндексация базы Whoosh успешно запланирована"
 
 msgid "Created user group %s"
 msgstr "Создана группа пользователей %s"
@@ -536,6 +629,9 @@
 msgid "An error occurred during deletion of user"
 msgstr "Произошла ошибка при удалении пользователя"
 
+msgid "The default user cannot be edited"
+msgstr "Нельзя редактировать пользователя по умолчанию"
+
 msgid "Added IP address %s to user whitelist"
 msgstr "Добавлен IP %s в белый список пользователя"
 
@@ -553,9 +649,19 @@
 msgid "You need to be signed in to view this page"
 msgstr "Страница доступна только авторизованным пользователям"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr "Обнаружена утечка CSRF-токена — истёк срок действия токенов форм"
+
 msgid "Repository not found in the filesystem"
 msgstr "Репозиторий не найден на файловой системе"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Набор изменений для %s %s не найден в %s"
+
+msgid "SSH access is disabled."
+msgstr "Доступ по SSH отключен."
+
 msgid "Binary file"
 msgstr "Двоичный файл"
 
@@ -574,9 +680,15 @@
 msgid "Created tag: %s"
 msgstr "Создан тег: %s"
 
+msgid "Changeset %s not found"
+msgstr "Набор изменений %s не найден"
+
 msgid "Show all combined changesets %s->%s"
 msgstr "Показать отличия вместе %s->%s"
 
+msgid "Compare view"
+msgstr "Сравнить вид"
+
 msgid "and"
 msgstr "и"
 
@@ -586,6 +698,9 @@
 msgid "revisions"
 msgstr "версии"
 
+msgid "Fork name %s"
+msgstr "Имя форка %s"
+
 msgid "Pull request %s"
 msgstr "Pull-запрос %s"
 
@@ -596,10 +711,10 @@
 msgstr "[создан] репозиторий"
 
 msgid "[created] repository as fork"
-msgstr "[создан] репозиторий как форк"
+msgstr "[создан] репозиторий в качестве форка"
 
 msgid "[forked] repository"
-msgstr "[форкнут] репозиторий"
+msgstr "[создан форк] репозитория"
 
 msgid "[updated] repository"
 msgstr "[обновлён] репозиторий"
@@ -626,10 +741,10 @@
 msgstr "[комментарий] к ревизии в репозитории"
 
 msgid "[commented] on pull request for"
-msgstr "[прокомментировано] в запросе на внесение изменений для"
+msgstr "[прокомментировано] в pull-запросе для"
 
 msgid "[closed] pull request for"
-msgstr "[закрыт] Pull-запрос для"
+msgstr "[закрыт] pull-запрос для"
 
 msgid "[pushed] into"
 msgstr "[отправлено] в"
@@ -644,10 +759,10 @@
 msgstr "[внесены изменения] из"
 
 msgid "[started following] repository"
-msgstr "[добавлен в наблюдения] репозиторий"
+msgstr "[подписка] на репозиторий"
 
 msgid "[stopped following] repository"
-msgstr "[удалён из наблюдения] репозиторий"
+msgstr "[отписка] от репозитория"
 
 msgid " and %s more"
 msgstr " и на %s больше"
@@ -679,11 +794,28 @@
 "переименован из файловой системы. Пожалуйста, перезапустите приложение "
 "для сканирования репозиториев"
 
+msgid "SSH key is missing"
+msgstr "Отсутствует ключ SSH"
+
+msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
+msgstr ""
+"Некорректный ключ SSH — он должен начинаться с 'ssh-(rsa|dss|ed25519)'"
+
+msgid "Incorrect SSH key - unexpected characters in base64 part %r"
+msgstr ""
+"Некорректный ключ SSH — присутствуют некорректные символы в коде base64 %r"
+
+msgid "Incorrect SSH key - failed to decode base64 part %r"
+msgstr "Некорректный ключ SSH — ошибка декодирования кода base64 %r"
+
+msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
+msgstr "Некорректный ключ SSH — код base64 соответствует не %r, а %r"
+
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d год"
-msgstr[1] "%d лет"
-msgstr[2] "%d года"
+msgstr[1] "%d года"
+msgstr[2] "%d лет"
 
 msgid "%d month"
 msgid_plural "%d months"
@@ -700,14 +832,14 @@
 msgid "%d hour"
 msgid_plural "%d hours"
 msgstr[0] "%d час"
-msgstr[1] "%d часов"
-msgstr[2] "%d часа"
+msgstr[1] "%d часа"
+msgstr[2] "%d часов"
 
 msgid "%d minute"
 msgid_plural "%d minutes"
 msgstr[0] "%d минута"
-msgstr[1] "%d минут"
-msgstr[2] "%d минуты"
+msgstr[1] "%d минуты"
+msgstr[2] "%d минут"
 
 msgid "%d second"
 msgid_plural "%d seconds"
@@ -728,7 +860,7 @@
 msgstr "%s и %s назад"
 
 msgid "just now"
-msgstr "прямо сейчас"
+msgstr "только что"
 
 msgid "on line %s"
 msgstr "на строке %s"
@@ -742,6 +874,10 @@
 msgid "Kallithea Administrator"
 msgstr "Администратор Kallithea"
 
+msgid "Default user has no access to new repositories"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым репозиториям"
+
 msgid "Default user has read access to new repositories"
 msgstr "Неавторизованные пользователи имеют право чтения новых репозиториев"
 
@@ -749,12 +885,103 @@
 msgstr ""
 "Неавторизованные пользователи имеют право записи в новые репозитории"
 
+msgid "Default user has admin access to new repositories"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым "
+"репозиториям"
+
+msgid "Default user has no access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым группам "
+"репозиториев"
+
+msgid "Default user has read access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют право чтения в новых группах "
+"репозиториев"
+
+msgid "Default user has write access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют право записи в новых группах "
+"репозиториев"
+
+msgid "Default user has admin access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым группам "
+"репозиториев"
+
+msgid "Default user has no access to new user groups"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым группам "
+"пользователей"
+
+msgid "Default user has read access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют право чтения в новых группах "
+"пользователей"
+
+msgid "Default user has write access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют право записи в новых группах "
+"пользователей"
+
+msgid "Default user has admin access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым группам "
+"пользователей"
+
 msgid "Only admins can create repository groups"
-msgstr "Только администраторы могут создавать группы"
+msgstr "Только администраторы могут создавать группы репозиториев"
+
+msgid "Non-admins can create repository groups"
+msgstr "Группы репозиториев могут создаваться любыми пользователями"
+
+msgid "Only admins can create user groups"
+msgstr "Группы пользователей могут создаваться только администраторами"
+
+msgid "Non-admins can create user groups"
+msgstr "Группы пользователей могут создаваться любыми пользователями"
+
+msgid "Only admins can create top level repositories"
+msgstr "Только администраторы могут создавать репозитории верхнего уровня"
+
+msgid "Non-admins can create top level repositories"
+msgstr "Любой пользователь может создавать репозитории верхнего уровня"
+
+msgid ""
+"Repository creation enabled with write permission to a repository group"
+msgstr ""
+"Создание репозиториев доступно с правом на запись в группу репозиториев"
+
+msgid ""
+"Repository creation disabled with write permission to a repository group"
+msgstr ""
+"Создание репозиториев недоступно с правом на запись в группу репозиториев"
+
+msgid "Only admins can fork repositories"
+msgstr "Форки репозиториев могут создаваться только администраторами"
+
+msgid "Non-admins can fork repositories"
+msgstr "Форки репозиториев могут создаваться любыми пользователями"
 
 msgid "Registration disabled"
 msgstr "Регистрация отключена"
 
+msgid "User registration with manual account activation"
+msgstr "Регистрация пользователя с ручной активацией учётной записи"
+
+msgid "User registration with automatic account activation"
+msgstr "Регистрация пользователя с автоматической активацией"
+
+msgid "Not reviewed"
+msgstr "Не проверено"
+
+msgid "Under review"
+msgstr "На проверке"
+
+msgid "Not approved"
+msgstr "Не одобрено"
+
 msgid "Approved"
 msgstr "Одобрено"
 
@@ -770,19 +997,94 @@
 msgid "Enter %(min)i characters or more"
 msgstr "Введите не менее %(min)i символов"
 
+msgid "Name must not contain only digits"
+msgstr "Имя не может состоять только из цифр"
+
+msgid ""
+"[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
+"%(branch)s"
+msgstr ""
+"[Комментарий] к набору изменений %(short_id)s «%(message_short)s» "
+"репозитория %(repo_name)s в %(branch)s"
+
 msgid "New user %(new_username)s registered"
 msgstr "Новый пользователь \"%(new_username)s\" зарегистрирован"
 
+msgid ""
+"[Review] %(repo_name)s PR %(pr_nice_id)s \"%(pr_title_short)s\" from "
+"%(pr_source_branch)s by %(pr_owner_username)s"
+msgstr ""
+"[Ревью] к PR %(pr_nice_id)s «%(pr_title_short)s» из %(pr_source_branch)s "
+"репозитория %(repo_name)s от %(pr_owner_username)s"
+
+msgid ""
+"[Comment] %(repo_name)s PR %(pr_nice_id)s \"%(pr_title_short)s\" from "
+"%(pr_source_branch)s by %(pr_owner_username)s"
+msgstr ""
+"[Комментарий] к PR %(pr_nice_id)s «%(pr_title_short)s» из "
+"%(pr_source_branch)s репозитория %(repo_name)s от %(pr_owner_username)s"
+
 msgid "Closing"
 msgstr "Закрыт"
 
+msgid ""
+"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
+msgstr ""
+"%(user)s просит вас рассмотреть pull-запрос %(pr_nice_id)s: %(pr_title)s"
+
+msgid "Cannot create empty pull request"
+msgstr "Невозможно создать пустой pull-запрос"
+
+msgid ""
+"Cannot create pull request - criss cross merge detected, please merge a "
+"later %s revision to %s"
+msgstr ""
+"Невозможно создать pull-запрос — обнаружено перекрёстное слияние. "
+"Попробуйте слить более позднюю ревизию %s с %s"
+
+msgid "You are not authorized to create the pull request"
+msgstr "Недостаточно привилегий для создания pull-запроса"
+
+msgid "Missing changesets since the previous iteration:"
+msgstr "Отсутствующие ревизии относительно предыдущей итерации:"
+
+msgid "New changesets on %s %s since the previous iteration:"
+msgstr "Новые наборы изменений в %s %s относительно предыдущей итерации:"
+
+msgid "Ancestor didn't change - diff since previous iteration:"
+msgstr "Предок не изменился — разница с момента последней итерации:"
+
+msgid ""
+"This iteration is based on another %s revision and there is no simple "
+"diff."
+msgstr ""
+"Эта итерация основана на другой ревизии %s, простой diff невозможен."
+
+msgid "No changes found on %s %s since previous iteration."
+msgstr "Нет изменений на %s %s относительно предыдущей итерации."
+
+msgid "Closed, next iteration: %s ."
+msgstr "Закрыто. Следующая итерация: %s."
+
 msgid "latest tip"
 msgstr "последняя версия"
 
+msgid "SSH key %r is invalid: %s"
+msgstr "Ошибка ключа SSH %r: %s"
+
+msgid "SSH key %s is already used by %s"
+msgstr "Ключ SSH %s уже используется пользователем %s"
+
 msgid "New user registration"
 msgstr "Регистрация нового пользователя"
 
 msgid ""
+"You can't remove this user since it is crucial for the entire application"
+msgstr ""
+"Вы не можете удалить этого пользователя, поскольку это критично для "
+"работы всего приложения"
+
+msgid ""
 "User \"%s\" still owns %s repositories and cannot be removed. Switch "
 "owners or remove those repositories: %s"
 msgstr ""
@@ -808,12 +1110,34 @@
 msgid "Password reset link"
 msgstr "Ссылка сброса пароля"
 
+msgid "Password reset notification"
+msgstr "Уведомление о сбросе пароля"
+
+msgid ""
+"The password to your account %s has been changed using password reset "
+"form."
+msgstr "Пароль к вашему аккаунту %s был изменён через форму сброса пароля."
+
 msgid "Value cannot be an empty list"
 msgstr "Значение не может быть пустым списком"
 
 msgid "Username \"%(username)s\" already exists"
 msgstr "Пользователь с именем \"%(username)s\" уже существует"
 
+msgid "Username \"%(username)s\" cannot be used"
+msgstr "Имя «%(username)s» недопустимо"
+
+msgid ""
+"Username may only contain alphanumeric characters underscores, periods or "
+"dashes and must begin with an alphanumeric character or underscore"
+msgstr ""
+"Имя пользователя может содержать только буквы, цифры, символы "
+"подчеркивания, точки и тире, а также должно начинаться с буквы, цифры или "
+"с символа подчеркивания"
+
+msgid "The input is not valid"
+msgstr "Введено некорректное значение"
+
 msgid "Username %(username)s is not valid"
 msgstr "Имя \"%(username)s\" недопустимо"
 
@@ -848,6 +1172,12 @@
 msgid "Passwords do not match"
 msgstr "Пароли не совпадают"
 
+msgid "Invalid username or password"
+msgstr "Неверное имя пользователя или пароль"
+
+msgid "Repository name %(repo)s is not allowed"
+msgstr "Имя репозитория %(repo)s недопустимо"
+
 msgid "Repository named %(repo)s already exists"
 msgstr "Репозитарий %(repo)s уже существует"
 
@@ -857,8 +1187,18 @@
 msgid "Repository group with name \"%(repo)s\" already exists"
 msgstr "Группа репозиториев \"%(repo)s\" уже существует"
 
+msgid "Invalid repository URL"
+msgstr "Недопустимый URL репозитория"
+
+msgid ""
+"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
+"svn+https URL"
+msgstr ""
+"Недопустимый URL репозитория. Требуется корректный http, https, ssh, svn"
+"+http или svn+https URL"
+
 msgid "Fork has to be the same type as parent"
-msgstr "Тип форка будет совпадать с родительским"
+msgstr "Форк будет иметь тот же тип, что и родительский"
 
 msgid "You don't have permissions to create repository in this group"
 msgstr "У вас недостаточно прав для создания репозиториев в этой группе"
@@ -875,6 +1215,12 @@
 msgid "This is not a valid path"
 msgstr "Этот путь ошибочен"
 
+msgid "This email address is already in use"
+msgstr "Этот адрес почты уже занят"
+
+msgid "Email address \"%(email)s\" not found"
+msgstr "Адрес «%(email)s» не зарегистрирован"
+
 msgid ""
 "The LDAP Login attribute of the CN must be specified - this is the name "
 "of the attribute that is equivalent to \"username\""
@@ -899,6 +1245,9 @@
 msgid "Filename cannot be inside a directory"
 msgstr "Файла нет в каталоге"
 
+msgid "Plugins %(loaded)s and %(next_to_load)s both export the same name"
+msgstr "Плагины %(loaded)s и %(next_to_load)s экспортируют одно и то же имя"
+
 msgid "About"
 msgstr "О программе"
 
@@ -942,6 +1291,9 @@
 msgid "Password"
 msgstr "Пароль"
 
+msgid "Stay logged in after browser restart"
+msgstr "Оставаться авторизованным"
+
 msgid "Forgot your password ?"
 msgstr "Забыли пароль?"
 
@@ -969,6 +1321,35 @@
 msgid "Send Password Reset Email"
 msgstr "Послать ссылку сброса пароля"
 
+msgid ""
+"A password reset link will be sent to the specified email address if it "
+"is registered in the system."
+msgstr ""
+"Ссылка для сброса пароля была отправлена на соответствующий e-mail, если "
+"он был зарегистрирован в системе."
+
+msgid "You are about to set a new password for the email address %s."
+msgstr "Вы собираетесь установить новый пароль для адреса %s."
+
+msgid ""
+"Note that you must use the same browser session for this as the one used "
+"to request the password reset."
+msgstr ""
+"Обратите внимание, что вы должны оставаться в пределах этой сессии "
+"браузера, поскольку в ней был запрошен сброс пароля."
+
+msgid "Code you received in the email"
+msgstr "Код, который вы получили по почте"
+
+msgid "New Password"
+msgstr "Новый пароль"
+
+msgid "Confirm New Password"
+msgstr "Подтверждение пароля"
+
+msgid "Confirm"
+msgstr "Подтвердить"
+
 msgid "Sign Up"
 msgstr "Регистрация"
 
@@ -987,6 +1368,11 @@
 msgid "Email"
 msgstr "E-mail"
 
+msgid "Registered accounts are ready to use and need no further action."
+msgstr ""
+"Зарегистрированные аккаунты готовы к использованию и не требуют "
+"дальнейших действий."
+
 msgid "Please wait for an administrator to activate your account."
 msgstr ""
 "Пожалуйста, подождите, пока администратор подтвердит Вашу регистрацию."
@@ -995,7 +1381,7 @@
 msgstr "Журнал администратора"
 
 msgid "journal filter..."
-msgstr "Фильтр журнала..."
+msgstr "фильтр..."
 
 msgid "Filter"
 msgstr "Отфильтровать"
@@ -1003,8 +1389,8 @@
 msgid "%s Entry"
 msgid_plural "%s Entries"
 msgstr[0] "%s запись"
-msgstr[1] "%s записей"
-msgstr[2] "%s записи"
+msgstr[1] "%s записи"
+msgstr[2] "%s записей"
 
 msgid "Action"
 msgstr "Действие"
@@ -1013,7 +1399,7 @@
 msgstr "Дата"
 
 msgid "From IP"
-msgstr "С IP"
+msgstr "IP"
 
 msgid "No actions yet"
 msgstr "Действия ещё не производились"
@@ -1030,6 +1416,13 @@
 msgid "Enabled Plugins"
 msgstr "Включенные плагины"
 
+msgid ""
+"Comma-separated list of plugins; Kallithea will try user authentication "
+"in plugin order"
+msgstr ""
+"Список плагинов через запятую. Kallithea будет аутентифицировать "
+"пользователя в порядке указания плагинов"
+
 msgid "Available built-in plugins"
 msgstr "Доступные встроенные плагины"
 
@@ -1068,12 +1461,28 @@
 msgid "Edit Gist"
 msgstr "Правка gist-записи"
 
+msgid ""
+"Gist was updated since you started editing. Copy your changes and click "
+"%(here)s to reload new version."
+msgstr ""
+"Gist был изменён с момента начала редактирования. Скопируйте свои правки "
+"и нажмите %(here)s для загрузки новой версии."
+
+msgid "here"
+msgstr "сюда"
+
 msgid "Gist description ..."
 msgstr "Описание..."
 
+msgid "Gist lifetime"
+msgstr "Время жизни gist`а"
+
 msgid "Expires"
 msgstr "Истекает"
 
+msgid "Never"
+msgstr "никогда"
+
 msgid "Update Gist"
 msgstr "Обновить"
 
@@ -1098,6 +1507,18 @@
 msgid "There are no gists yet"
 msgstr "Gist-записи отсутствуют"
 
+msgid "New Gist"
+msgstr "Новый gist"
+
+msgid "Name this gist ..."
+msgstr "Назовите этот gist…"
+
+msgid "Create Private Gist"
+msgstr "Создать приватный gist"
+
+msgid "Create Public Gist"
+msgstr "Создать публичный gist"
+
 msgid "Reset"
 msgstr "Сброс"
 
@@ -1107,6 +1528,12 @@
 msgid "URL"
 msgstr "URL"
 
+msgid "Public Gist"
+msgstr "Публичный Gist"
+
+msgid "Private Gist"
+msgstr "Приватный Gist"
+
 msgid "Delete"
 msgstr "Удалить"
 
@@ -1126,17 +1553,76 @@
 msgstr "Показать только текст"
 
 msgid "My Account"
-msgstr "Мой Аккаунт"
+msgstr "Мой аккаунт"
 
 msgid "Profile"
 msgstr "Профиль"
 
+msgid "Email Addresses"
+msgstr "Новый E-mail"
+
+msgid "SSH Keys"
+msgstr "Ключи SSH"
+
 msgid "API Keys"
 msgstr "API-ключи"
 
+msgid "Owned Repositories"
+msgstr "Свои репозитории"
+
+msgid "Watched Repositories"
+msgstr "Наблюдаемые репозитории"
+
+msgid "Show Permissions"
+msgstr "Права доступа"
+
+msgid "Built-in"
+msgstr "Встроенный"
+
+msgid "Confirm to reset this API key: %s"
+msgstr "Подтвердите сброс этого API-ключа: %s"
+
+msgid "Expired"
+msgstr "Срок действия истёк"
+
+msgid "Confirm to remove this API key: %s"
+msgstr "Подтвердите удаление этого API-ключа: %s"
+
+msgid "Remove"
+msgstr "Удалить"
+
+msgid "No additional API keys specified"
+msgstr "Дополнительные API-ключи не указаны"
+
+msgid "New API key"
+msgstr "Новый API-ключ"
+
 msgid "Add"
 msgstr "Добавить"
 
+msgid ""
+"\n"
+"API keys are used to let scripts or services access %s using your\n"
+"account, as if you had provided the script or service with your actual\n"
+"password.\n"
+msgstr ""
+"\n"
+"Ключи API позволяют скриптам или сервисам получать \n"
+"доступ к %s от имени вашего аккаунта, как если бы вы \n"
+"указали в скрипте или сервисе свой реальный пароль.\n"
+
+msgid ""
+"\n"
+"Like passwords, API keys should therefore never be shared with others,\n"
+"nor passed to untrusted scripts or services. If such sharing should\n"
+"happen anyway, reset the API key on this page to prevent further use.\n"
+msgstr ""
+"\n"
+"Как и пароли, ключи API не следует передавать третьим лицам,\n"
+"ненадёжным скриптам и сервисам. Если это всё же произошло, \n"
+"сбросьте ключ на этой странице, чтобы предотвратить\n"
+"его дальнейшее использование.\n"
+
 msgid "Primary"
 msgstr "Основной"
 
@@ -1161,20 +1647,57 @@
 msgid "Confirm new password"
 msgstr "Подтвердите новый пароль"
 
+msgid ""
+"This account is managed with %s and the password cannot be changed here"
+msgstr "Этим аккаунтом управляет %s, поэтому здесь нельзя сменить пароль"
+
+msgid "Current IP"
+msgstr "Текущий IP-адрес"
+
+msgid "Gravatar"
+msgstr "Grаvatar"
+
+msgid "Change %s avatar at"
+msgstr "Измените аватар %s на"
+
 msgid "Avatars are disabled"
 msgstr "Аватары отключены"
 
 msgid "Repositories You Own"
-msgstr "Репозитории, где Вы — владелец"
+msgstr "Ваши репозитории"
 
 msgid "Name"
 msgstr "Имя"
 
+msgid "Fingerprint"
+msgstr "Отпечаток"
+
+msgid "Last Used"
+msgstr "Использовался в предыдущий раз"
+
+msgid "Confirm to remove this SSH key: %s"
+msgstr "Подтвердите удаление этого ключа SSH: %s"
+
+msgid "No SSH keys have been added"
+msgstr "Ключи SSH не были добавлены"
+
+msgid "New SSH key"
+msgstr "Новый ключ SSH"
+
+msgid "Public key"
+msgstr "Публичный ключ"
+
+msgid "Public key (contents of e.g. ~/.ssh/id_rsa.pub)"
+msgstr "Публичный ключ (например, из файла ~/.ssh/id_rsa.pub)"
+
 msgid "Repositories You are Watching"
-msgstr "Репозитории, за которыми Вы наблюдаете"
+msgstr "Репозитории, за которыми вы наблюдаете"
 
 msgid "Default Permissions"
-msgstr "Стандартные привилегии"
+msgstr "Права по умолчанию"
+
+msgid "Global"
+msgstr "Глобальные"
 
 msgid "IP Whitelist"
 msgstr "Белый список IP"
@@ -1182,6 +1705,16 @@
 msgid "Anonymous access"
 msgstr "Анонимный доступ"
 
+msgid "Allow anonymous access"
+msgstr "Разрешить анонимный доступ"
+
+msgid ""
+"Allow access to Kallithea without needing to log in. Anonymous users use "
+"%s user permissions."
+msgstr ""
+"Разрешить доступ к Kallithea без авторизации. Анонимные пользователи "
+"будут использовать права доступа пользователя %s."
+
 msgid ""
 "All default permissions on each repository will be reset to chosen "
 "permission, note that all custom default permission on repositories will "
@@ -1191,6 +1724,12 @@
 "репозитория. Учтите, что ранее установленные привилегии по умолчанию "
 "будут сброшены"
 
+msgid "Apply to all existing repositories"
+msgstr "Применить ко всем репозиториям"
+
+msgid "Permissions for the Default user on new repositories."
+msgstr "Права пользователя по умолчанию для новых репозиториев."
+
 msgid "Repository group"
 msgstr "Группа репозиториев"
 
@@ -1203,21 +1742,82 @@
 "репозиториев. Учтите, что ранее установленные привилегии по умолчанию для "
 "групп репозиториев будут сброшены"
 
+msgid "Apply to all existing repository groups"
+msgstr "Применить ко всем группам репозиториев"
+
+msgid "Permissions for the Default user on new repository groups."
+msgstr "Права пользователя по умолчанию для новых групп репозиториев."
+
 msgid "User group"
 msgstr "Группа пользователей"
 
+msgid ""
+"All default permissions on each user group will be reset to chosen "
+"permission, note that all custom default permission on user groups will "
+"be lost"
+msgstr ""
+"Выбранные привилегии будут установлены по умолчанию для каждой группы "
+"пользователей. Учтите, что ранее установленные привилегии по умолчанию "
+"для групп пользователей будут сброшены"
+
+msgid "Apply to all existing user groups"
+msgstr "Применить ко всем группам пользователей"
+
+msgid "Permissions for the Default user on new user groups."
+msgstr "Права пользователя по умолчанию для новых групп пользователей."
+
+msgid "Top level repository creation"
+msgstr "Создание репозитория верхнего уровня"
+
+msgid ""
+"Enable this to allow non-admins to create repositories at the top level."
+msgstr ""
+"Включите, чтобы разрешить всем пользователям создавать репозитории на "
+"верхнем уровне."
+
+msgid ""
+"Note: This will also give all users API access to create repositories "
+"everywhere. That might change in future versions."
+msgstr ""
+"Внимание: это также позволит всем пользователям с помощью API создавать "
+"репозитории где угодно. Это может измениться в будущих версиях."
+
+msgid "Repository creation with group write access"
+msgstr "Создание репозитория с правом записи в группы"
+
+msgid ""
+"With this, write permission to a repository group allows creating "
+"repositories inside that group. Without this, group write permissions "
+"mean nothing."
+msgstr ""
+"С этой опцией, право записи в группу репозиториев позволяет создавать "
+"репозитории в этой группе. Без неё, право записи в группу не имеет "
+"действия."
+
 msgid "User group creation"
 msgstr "Создание групп пользователей"
 
+msgid "Enable this to allow non-admins to create user groups."
+msgstr ""
+"Включите для возможности создавать группы пользователей любым "
+"пользователям."
+
 msgid "Repository forking"
 msgstr "Создание форка репозитория"
 
+msgid "Enable this to allow non-admins to fork repositories."
+msgstr ""
+"Включите для возможности создавать форки репозиториев любым пользователем."
+
 msgid "Registration"
 msgstr "Регистрация"
 
 msgid "External auth account activation"
 msgstr "Активация сторонней учетной записи"
 
+msgid "Confirm to delete this IP address: %s"
+msgstr "Подтвердите удаление IP-адреса: %s"
+
 msgid "All IP addresses are allowed."
 msgstr "Все IP-адреса разрешены."
 
@@ -1233,6 +1833,12 @@
 msgid "Group parent"
 msgstr "Родительская группа"
 
+msgid "Copy parent group permissions"
+msgstr "Скопировать родительские права доступа"
+
+msgid "Copy permission set from parent repository group."
+msgstr "Скопировать набор прав доступа из родительской группы репозиториев."
+
 msgid "%s Repository Group Settings"
 msgstr "Настройки группы репозиториев %s"
 
@@ -1243,14 +1849,23 @@
 msgstr "Настройки"
 
 msgid "Advanced"
-msgstr "Дополнительно"
+msgstr "Продвинутые"
 
 msgid "Permissions"
-msgstr "Привилегии"
+msgstr "Права доступа"
 
 msgid "Repository Group: %s"
 msgstr "Группа репозиториев: %s"
 
+msgid "Top level repositories"
+msgstr "Репозитории верхнего уровня"
+
+msgid "Total repositories"
+msgstr "Всего репозиториев"
+
+msgid "Children groups"
+msgstr "Дочерние группы"
+
 msgid "Created on"
 msgstr "Создано"
 
@@ -1261,23 +1876,70 @@
 msgstr[2] "Подтвердите удаление группы %s, содержащей %s репозиториев"
 
 msgid "Delete this repository group"
-msgstr "Удалить эту группу репозиториев"
+msgstr "Удалить группу репозиториев"
+
+msgid "Not visible"
+msgstr "Невидимый"
+
+msgid "Visible"
+msgstr "Видимый"
+
+msgid "Add repos"
+msgstr "Добавлять репозитории"
+
+msgid "Add/Edit groups"
+msgstr "Добавлять/Редактировать группы"
+
+msgid "User/User Group"
+msgstr "Пользователь/Группа"
+
+msgid "Default"
+msgstr "По умолчанию"
+
+msgid "Revoke"
+msgstr "Отозвать"
 
 msgid "Add new"
 msgstr "Добавить новый"
 
+msgid "Apply to children"
+msgstr "Применить к дочерним"
+
+msgid "Both"
+msgstr "Все"
+
+msgid ""
+"Set or revoke permission to all children of that group, including non-"
+"private repositories and other groups if selected."
+msgstr ""
+"Установить или отозвать права всех дочерних элементов этой группы, "
+"включая публичные репозитории и другие группы, если они выбраны."
+
 msgid "Remove this group"
-msgstr "Удалить эту группу"
+msgstr "Удалить группу"
 
 msgid "Confirm to delete this group"
 msgstr "Подтвердите удаление этой группы пользователей"
 
+msgid "Repository group %s"
+msgstr "Группа репозиториев %s"
+
 msgid "Repository Groups Administration"
 msgstr "Администрирование групп репозиториев"
 
 msgid "Number of Top-level Repositories"
 msgstr "Число репозиториев верхнего уровня"
 
+msgid "Clone remote repository"
+msgstr "Клонировать удалённый репозиторий"
+
+msgid ""
+"Optional: URL of a remote repository. If set, the repository will be "
+"created as a clone from this URL."
+msgstr ""
+"Опционально: URL удалённого репозитория. Если параметр задан, то будет "
+"создан клон репозитория, расположенного по этому адресу."
+
 msgid ""
 "Keep it short and to the point. Use a README file for longer descriptions."
 msgstr ""
@@ -1292,17 +1954,44 @@
 msgid "Landing revision"
 msgstr "Ревизия для выгрузки"
 
+msgid ""
+"Default revision for files page, downloads, full text search index and "
+"readme generation"
+msgstr ""
+"Ревизия по умолчанию для страницы файлов, загрузки, полнотекстовый "
+"поисковый индекс и генерация readme"
+
 msgid "%s Creating Repository"
 msgstr "Создание репозитория %s"
 
+msgid "Creating repository"
+msgstr "Создание репозитория"
+
+msgid ""
+"Repository \"%(repo_name)s\" is being created, you will be redirected "
+"when this process is finished.repo_name"
+msgstr ""
+"Репозиторий «%(repo_name)s» создаётся. Вы будете перенаправлены, когда "
+"процесс завершится."
+
+msgid ""
+"We're sorry but error occurred during this operation. Please check your "
+"Kallithea server logs, or contact administrator."
+msgstr ""
+"К сожалению, во время данной операции произошла ошибка. Пожалуйста, "
+"проверьте журнал сервера Kallithea или свяжитесь с администратором."
+
 msgid "%s Repository Settings"
 msgstr "Настройки репозитория %s"
 
 msgid "Extra Fields"
 msgstr "Дополнительные поля"
 
+msgid "Caches"
+msgstr "Кэш"
+
 msgid "Remote"
-msgstr "Удалённый"
+msgstr "Удалённый репозиторий"
 
 msgid "Statistics"
 msgstr "Статистика"
@@ -1314,7 +2003,7 @@
 msgstr "Набор"
 
 msgid "Manually set this repository as a fork of another from the list."
-msgstr "Вручную сделать этот репозиторий форком выбранного из списка."
+msgstr "Вручную задать этот репозиторий форком репозитория из этого списка."
 
 msgid "Public Journal Visibility"
 msgstr "Доступ к публичному журналу"
@@ -1340,15 +2029,24 @@
 
 msgid "This repository has %s fork"
 msgid_plural "This repository has %s forks"
-msgstr[0] "Данный репозиторий имеет %s копию"
-msgstr[1] "Данный репозиторий имеет %s копии"
-msgstr[2] "Данный репозиторий имеет %s копий"
+msgstr[0] "Данный репозиторий имеет %s форк"
+msgstr[1] "Данный репозиторий имеет %s форка"
+msgstr[2] "Данный репозиторий имеет %s форков"
 
 msgid "Detach forks"
-msgstr "Отсоединить fork'и"
+msgstr "Отделить форки"
 
 msgid "Delete forks"
-msgstr "Удалить fork'и"
+msgstr "Удалить форки"
+
+msgid ""
+"The deleted repository will be moved away and hidden until the "
+"administrator expires it. The administrator can both permanently delete "
+"it or restore it."
+msgstr ""
+"Удаляемый репозиторий будет перемещён и скрыт на срок, определяемый "
+"администратором. Администратор может либо удалить, либо восстановить "
+"репозиторий."
 
 msgid "Invalidate Repository Cache"
 msgstr "Сбросить кэш репозитория"
@@ -1371,6 +2069,9 @@
 msgid "Active"
 msgstr "Активный"
 
+msgid "Label"
+msgstr "Имя"
+
 msgid "Confirm to delete this field: %s"
 msgstr "Подтвердите удаление этого поля: %s"
 
@@ -1392,14 +2093,72 @@
 msgid "Extra fields are disabled."
 msgstr "Дополнительные поля отключены."
 
+msgid "Private Repository"
+msgstr "Приватный репозиторий"
+
+msgid "Fork of repository"
+msgstr "Форк репозитория"
+
+msgid "Remote repository URL"
+msgstr "Ссылка на удалённый репозиторий"
+
+msgid "Pull Changes from Remote Repository"
+msgstr "Изменения из удалённого репозитория"
+
+msgid "Confirm to pull changes from remote repository."
+msgstr "Подтвердите применение изменений из удалённого репозитория."
+
+msgid "This repository does not have a remote repository URL."
+msgstr "Данный репозиторий не имеет URL удалённого репозитория."
+
+msgid "Permanent URL"
+msgstr "Постоянный URL"
+
+msgid ""
+"In case this repository is renamed or moved into another group the "
+"repository URL changes.\n"
+"                               Using the above permanent URL guarantees "
+"that this repository always will be accessible on that URL.\n"
+"                               This is useful for CI systems, or any "
+"other cases that you need to hardcode the URL into a 3rd party service."
+msgstr ""
+"В случае, когда репозиторий переименовывается или перемещается в другую "
+"группу, URL репозитория изменяется.\n"
+"                               Использование постоянного URL гарантирует, "
+"что данный репозиторий всегда будет доступен по этому URL.\n"
+"                               Это может быть полезно в CI-системах, или "
+"в любом другом случае, требующем встраивания URL в код ПО."
+
+msgid "Remote repository"
+msgstr "Удалённый репозиторий"
+
+msgid "Repository URL"
+msgstr "URL репозитория"
+
+msgid ""
+"Optional: URL of a remote repository. If set, the repository can be "
+"pulled from this URL."
+msgstr ""
+"Опционально: URL удалённого репозитория. Если задан, то репозиторий можно "
+"получить по заданному адресу."
+
 msgid "Default revision for files page, downloads, whoosh and readme"
 msgstr ""
 "Ревизия по умолчанию, из которой будет производиться выгрузка файлов при "
 "скачивании"
 
+msgid "Type name of user"
+msgstr "Введите имя пользователя"
+
 msgid "Change owner of this repository."
 msgstr "Изменить владельца репозитория."
 
+msgid "Processed commits"
+msgstr "Обработанные фиксации"
+
+msgid "Processed progress"
+msgstr "Обработанный прогресс"
+
 msgid "Reset Statistics"
 msgstr "Сброс статистики"
 
@@ -1409,48 +2168,185 @@
 msgid "Repositories Administration"
 msgstr "Администрирование репозиториев"
 
+msgid "State"
+msgstr "Состояние"
+
 msgid "Settings Administration"
 msgstr "Администрирование настроек"
 
+msgid "VCS"
+msgstr "Контроль версий"
+
+msgid "Remap and Rescan"
+msgstr "Пересканирование"
+
+msgid "Visual"
+msgstr "Вид"
+
 msgid "Hooks"
 msgstr "Хуки"
 
+msgid "Full Text Search"
+msgstr "Полнотекстовый поиск"
+
+msgid "System Info"
+msgstr "О системе"
+
+msgid "Send test email to"
+msgstr "Отправлять пробное сообщение на адрес"
+
 msgid "Send"
 msgstr "Отправить"
 
 msgid "Site branding"
-msgstr "Брендинг сайта"
+msgstr "Заголовок сайта"
+
+msgid "Set a custom title for your Kallithea Service."
+msgstr "Задать другое имя для Kallithea Service."
 
 msgid "HTTP authentication realm"
 msgstr "Приветствие для HTTP-аутентификации"
 
+msgid "HTML/JavaScript/CSS customization block"
+msgstr "Блок редактирования HTML/JavaScript/CSS"
+
+msgid ""
+"HTML (possibly with                         JavaScript and/or CSS) that "
+"will be added to the bottom                         of every page. This "
+"can be used for web analytics                         systems, but also "
+"to                         perform instance-specific customizations like "
+"adding a                         project banner at the top of every page."
+msgstr ""
+"Код HTML (можно с                         JavaScript и/или CSS), который "
+"будет добавлен внизу                         каждой страницы. Может "
+"использоваться для размещения                         веб-аналитики, но "
+"также                         и для создания индивидуальных "
+"модификаций,                         например, для размещения баннера "
+"проекта                         на каждой странице."
+
+msgid "ReCaptcha public key"
+msgstr "Открытый ключ reCaptcha"
+
+msgid "Public key for reCaptcha system."
+msgstr "Открытый ключ системы reCaptcha."
+
+msgid "ReCaptcha private key"
+msgstr "Закрытый ключ reCaptcha"
+
+msgid ""
+"Private key for reCaptcha system. Setting this value will enable captcha "
+"on registration."
+msgstr ""
+"Закрытый ключ системы reCaptcha. Задание этого значения включит капчу при "
+"регистрации."
+
 msgid "Save Settings"
 msgstr "Сохранить настройки"
 
+msgid "Built-in Mercurial Hooks (Read-Only)"
+msgstr "Встроенные хуки Mercurial (только чтение)"
+
 msgid "Custom Hooks"
 msgstr "Пользовательские хуки"
 
+msgid ""
+"Hooks can be used to trigger actions on certain events such as push / "
+"pull. They can trigger Python functions or external applications."
+msgstr ""
+"Хуки используются для активации действий при определённых событиях, "
+"например, push/pull-запросах. Могут активироваться функции Python либо "
+"внешние приложения."
+
 msgid "Failed to remove hook"
 msgstr "Не удалось удалить хук"
 
+msgid "Rescan options"
+msgstr "Опции пересканирования"
+
+msgid "Delete records of missing repositories"
+msgstr "Удалить записи об отсутствующих репозиториях"
+
+msgid ""
+"Check this option to remove all comments, pull requests and other records "
+"related to repositories that no longer exist in the filesystem."
+msgstr ""
+"Отметьте для удаления всех комментариев, pull-запросов и других записей, "
+"связанных с репозиториями, которые больше не существуют в файловой "
+"системе."
+
 msgid "Invalidate cache for all repositories"
 msgstr "Сбросить кэш для всех репозиториев"
 
 msgid "Check this to reload data and clear cache keys for all repositories."
-msgstr "Сбросить кэш для всех репозиториев."
+msgstr ""
+"Отметьте, чтобы перезагрузить данные и очистить ключи кэша у всех "
+"репозиториев."
+
+msgid "Install Git hooks"
+msgstr "Установить хуки Git"
+
+msgid ""
+"Verify if Kallithea's Git hooks are installed for each repository. "
+"Current hooks will be updated to the latest version."
+msgstr ""
+"Проверяет установку Git хуков от Kallithea у каждого репозитория. Текущие "
+"хуки будут обновлены до последней версии."
+
+msgid "Overwrite existing Git hooks"
+msgstr "Перезаписать существующие хуки"
+
+msgid ""
+"If installing Git hooks, overwrite any existing hooks, even if they do "
+"not seem to come from Kallithea. WARNING: This operation will destroy any "
+"custom git hooks you may have deployed by hand!"
+msgstr ""
+"Перезаписывает все существующие хуки при установке хуков Git, даже если "
+"они не поставляются с Kallithea. ПРЕДУПРЕЖДЕНИЕ: это действие уничтожит "
+"любые Git хуки, которые могли быть созданы вручную!"
+
+msgid "Rescan Repositories"
+msgstr "Пересканировать репозитории"
 
 msgid "Index build option"
 msgstr "Опции создания индекса"
 
 msgid "Build from scratch"
-msgstr "Сборка с нуля"
+msgstr "Пересобрать"
+
+msgid ""
+"This option completely reindexeses all of the repositories for proper "
+"fulltext search capabilities."
+msgstr ""
+"Эта опция полностью переиндексирует все репозитории для корректной работы "
+"полнотекстового поиска."
 
 msgid "Reindex"
 msgstr "Перестроить индекс"
 
+msgid "Checking for updates..."
+msgstr "Поиск обновлений..."
+
+msgid "Kallithea version"
+msgstr "Версия Kallithea"
+
+msgid "Kallithea configuration file"
+msgstr "Конфиг. Kallithea"
+
+msgid "Python version"
+msgstr "Версия Python"
+
+msgid "Platform"
+msgstr "Платформа"
+
 msgid "Git version"
 msgstr "Версия Git"
 
+msgid "Git path"
+msgstr "Путь к Git"
+
+msgid "Python Packages"
+msgstr "Пакеты Python"
+
 msgid "Show repository size after push"
 msgstr "Показывать размер репозитория после отправки"
 
@@ -1466,6 +2362,13 @@
 msgid "Enable hgsubversion extension"
 msgstr "Включить поддержку hgsubversion"
 
+msgid ""
+"Requires hgsubversion library to be installed. Enables cloning of remote "
+"Subversion repositories while converting them to Mercurial."
+msgstr ""
+"Требует наличия библиотеки hgsubversion. Включает клонирование удалённых "
+"репозиториев Subversion с последующим конвертированием в Mercurial."
+
 msgid "Location of repositories"
 msgstr "Местонахождение репозиториев"
 
@@ -1476,6 +2379,13 @@
 "Нажмите для разблокирования. Изменения вступят в силу после перезагрузки "
 "Kallithea."
 
+msgid ""
+"Filesystem location where repositories are stored. After changing this "
+"value, a restart and rescan of the repository folder are both required."
+msgstr ""
+"Путь к репозиториям в файловой системе. После изменения значения "
+"требуется перезапуск и пересканирование папки с репозиториями."
+
 msgid "General"
 msgstr "Главное"
 
@@ -1488,6 +2398,116 @@
 msgid "Show Kallithea version"
 msgstr "Отображать версию Kallithea"
 
+msgid ""
+"Shows or hides a version number of Kallithea displayed in the footer."
+msgstr "Показывает или скрывает версию Kallithea внизу страницы."
+
+msgid "Show user Gravatars"
+msgstr "Отображать Gravatars пользователя"
+
+msgid ""
+"Gravatar URL allows you to use another avatar server application.\n"
+"                                                        The following "
+"variables of the URL will be replaced accordingly.\n"
+"                                                        {scheme}    "
+"'http' or 'https' sent from running Kallithea server,\n"
+"                                                        {email}     user "
+"email,\n"
+"                                                        {md5email}  md5 "
+"hash of the user email (like at gravatar.com),\n"
+"                                                        {size}      size "
+"of the image that is expected from the server application,\n"
+"                                                        {netloc}    "
+"network location/server host of running Kallithea server"
+msgstr ""
+"Поле Gravatar URL позволяет использовать любой другой сервис аватаров.\n"
+"                                                        В URL можно "
+"использовать следующие переменные:\n"
+"                                                        {scheme}    "
+"используемый протокол, 'http' или 'https',\n"
+"                                                        {email}     e-"
+"mail пользователя,\n"
+"                                                        {md5email}  хэш "
+"md5 адреса почты пользователя (как на gravatar.com),\n"
+"                                                        {size}      "
+"ожидаемый размер изображения,\n"
+"                                                        {netloc}    "
+"сетевой путь/адрес хоста сервера Kallithea"
+
+msgid "HTTP Clone URL"
+msgstr "Ссылка для клонирования по HTTP"
+
+msgid ""
+"Schema of clone URL construction eg. '{scheme}://{user}@{netloc}/"
+"{repo}'.\n"
+"                                                    The following "
+"variables are available:\n"
+"                                                    {scheme} 'http' or "
+"'https' sent from running Kallithea server,\n"
+"                                                    {user}   current user "
+"username,\n"
+"                                                    {netloc} network "
+"location/server host of running Kallithea server,\n"
+"                                                    {repo}   full "
+"repository name,\n"
+"                                                    {repoid} ID of "
+"repository, can be used to construct clone-by-id,\n"
+"                                                    {system_user}  name "
+"of the Kallithea system user,\n"
+"                                                    {hostname}  server "
+"hostname\n"
+"                                                    "
+msgstr ""
+"Схема URL для клонирования, например: '{scheme}://{user}@{netloc}/"
+"{repo}'.\n"
+"                                                    Доступны следующие "
+"переменные:\n"
+"                                                    {scheme} используемый "
+"протокол, 'http' or 'https',\n"
+"                                                    {user}   имя текущего "
+"пользователя,\n"
+"                                                    {netloc} сетевой путь/"
+"адрес хоста сервера Kallithea,\n"
+"                                                    {repo}   полное имя "
+"репозитория,\n"
+"                                                    {repoid} ID "
+"репозитория, может применяться для клонирования по идентификатору,\n"
+"                                                    {system_user}  имя "
+"пользователя Kallithea в системе,\n"
+"                                                    {hostname}  имя хоста "
+"севера\n"
+"                                                    "
+
+msgid "SSH Clone URL"
+msgstr "Ссылка для клонирования по SSH"
+
+msgid ""
+"Schema for constructing SSH clone URL, eg. 'ssh://{system_user}"
+"@{hostname}/{repo}'."
+msgstr ""
+"Схема URL для клонирования по SSH, например: 'ssh://{system_user}"
+"@{hostname}/{repo}'."
+
+msgid "Repository page size"
+msgstr "Размер страницы репозитория"
+
+msgid ""
+"Number of items displayed in the repository pages before pagination is "
+"shown."
+msgstr ""
+"Количество элементов на странице репозитория до появления нумерации "
+"страниц."
+
+msgid "Admin page size"
+msgstr "Размер страницы администратора"
+
+msgid ""
+"Number of items displayed in the admin pages grids before pagination is "
+"shown."
+msgstr ""
+"Количество элементов в сетке страницы администратора до появления "
+"нумерации страниц."
+
 msgid "Icons"
 msgstr "Иконки"
 
@@ -1500,15 +2520,49 @@
 msgid "Show public/private icons next to repository names."
 msgstr "Показывать иконки публичных репозиториев."
 
+msgid "Meta Tagging"
+msgstr "Метатегирование"
+
+msgid ""
+"Parses meta tags from the repository description field and turns them "
+"into colored tags."
+msgstr ""
+"Анализирует мета-теги в поле описания репозитория и отображает их в виде "
+"цветных тегов."
+
+msgid "Stylify recognised meta tags:"
+msgstr "Стилизовать обнаруженные мета-теги:"
+
 msgid "Add user group"
 msgstr "Добавить группу пользователей"
 
+msgid "User Groups"
+msgstr "Группы пользователей"
+
+msgid "Add User Group"
+msgstr "Добавить группу пользователей"
+
+msgid "Short, optional description for this user group."
+msgstr "Краткое, опциональное описание этой группы."
+
+msgid "%s user group settings"
+msgstr "Настройки группы %s"
+
+msgid "Show Members"
+msgstr "Участники"
+
+msgid "User Group: %s"
+msgstr "Группа пользователей: %s"
+
 msgid "Members"
 msgstr "Участники"
 
 msgid "Confirm to delete this user group: %s"
 msgstr "Подтвердите удаление следующей группы пользователей: %s"
 
+msgid "Delete this user group"
+msgstr "Удалить группу"
+
 msgid "No members yet"
 msgstr "Нет участников"
 
@@ -1527,21 +2581,57 @@
 msgid "Users"
 msgstr "Пользователи"
 
+msgid "Add User"
+msgstr "Добавить пользователя"
+
 msgid "Password confirmation"
 msgstr "Подтверждение пароля"
 
+msgid "%s user settings"
+msgstr "Настройки пользователя %s"
+
+msgid "Emails"
+msgstr "Электронная почта"
+
+msgid "User: %s"
+msgstr "Пользователь: %s"
+
+msgid "Source of Record"
+msgstr "Источник записи"
+
+msgid "Last Login"
+msgstr "Последний вход"
+
+msgid "Member of User Groups"
+msgstr "Член групп пользователей"
+
 msgid "Confirm to delete this user: %s"
 msgstr "Подтвердите удаление пользователя %s"
 
+msgid "Delete this user"
+msgstr "Удалить пользователя"
+
+msgid "Inherited from %s"
+msgstr "Унаследовано от %s"
+
+msgid "Name in Source of Record"
+msgstr "Имя в источнике записи"
+
 msgid "New password confirmation"
 msgstr "Подтвердите новый пароль"
 
 msgid "Users Administration"
 msgstr "Администрирование пользователей"
 
+msgid "Auth Type"
+msgstr "Тип авторизации"
+
 msgid "Server instance: %s"
 msgstr "Экземпляр сервера: %s"
 
+msgid "Support"
+msgstr "Поддержка"
+
 msgid "Mercurial repository"
 msgstr "Репозиторий Mercurial"
 
@@ -1570,7 +2660,7 @@
 msgstr "Опции"
 
 msgid "Compare Fork"
-msgstr "Сравнить форк"
+msgstr "Сравнить форки"
 
 msgid "Compare"
 msgstr "Сравнить"
@@ -1579,20 +2669,23 @@
 msgstr "Поиск"
 
 msgid "Follow"
-msgstr "Наблюдать"
+msgstr "Подписаться"
 
 msgid "Unfollow"
-msgstr "Не наблюдать"
+msgstr "Отписаться"
 
 msgid "Fork"
 msgstr "Форк"
 
 msgid "Create Pull Request"
-msgstr "Создать Pull запрос"
+msgstr "Создать pull-запрос"
 
 msgid "Switch To"
 msgstr "Переключиться на"
 
+msgid "No matches found"
+msgstr "Совпадений не найдено"
+
 msgid "Show recent activity"
 msgstr "Показать последнюю активность"
 
@@ -1618,7 +2711,7 @@
 msgstr "Поиск по репозиториям"
 
 msgid "My Pull Requests"
-msgstr "Мои Pull-запросы"
+msgstr "Мои pull-запросы"
 
 msgid "Not Logged In"
 msgstr "Не авторизован"
@@ -1626,9 +2719,21 @@
 msgid "Login to Your Account"
 msgstr "Авторизоваться"
 
+msgid "Forgot password?"
+msgstr "Забыли пароль?"
+
+msgid "Don't have an account?"
+msgstr "Нет аккаунта?"
+
 msgid "Log Out"
 msgstr "Выход"
 
+msgid "Parent rev."
+msgstr "Ревизия предка"
+
+msgid "Child rev."
+msgstr "Ревизия потомка"
+
 msgid "Create repositories"
 msgstr "Создать репозитории"
 
@@ -1642,11 +2747,11 @@
 msgstr "Опция разрешает пользователю создавать группы пользователей"
 
 msgid "Fork repositories"
-msgstr "Создавать fork от репозиториев"
+msgstr "Создавать форки"
 
 msgid "Select this option to allow repository forking for this user"
 msgstr ""
-"Выберите эту опцию чтобы разрешить данному пользователю создавать fork'и "
+"Выберите, чтобы разрешить данному пользователю создавать форки "
 "репозиториев"
 
 msgid "Show"
@@ -1661,17 +2766,26 @@
 msgid "Edit Permission"
 msgstr "Изменить привилегии"
 
+msgid "No permission defined"
+msgstr "Права не заданы"
+
+msgid "Retry"
+msgstr "Повторить"
+
 msgid "Submitting ..."
 msgstr "Применение..."
 
+msgid "Unable to post"
+msgstr "Не удалось отправить"
+
 msgid "Add Another Comment"
 msgstr "Добавить ещё один комментарий"
 
 msgid "Stop following this repository"
-msgstr "Отменить наблюдение за репозиторием"
+msgstr "Отписаться от этого репозитория"
 
 msgid "Start following this repository"
-msgstr "Наблюдать за репозиторием"
+msgstr "Подписаться на этот репозиторий"
 
 msgid "Group"
 msgstr "Группа"
@@ -1688,6 +2802,15 @@
 msgid "No matching files"
 msgstr "Нет совпадений"
 
+msgid "Open New Pull Request from {0}"
+msgstr "Открыть новый pull-запрос от {0}"
+
+msgid "Open New Pull Request for {0} &rarr; {1}"
+msgstr "Открыть новый pull-запрос для {0} &rarr; {1}"
+
+msgid "Show Selected Changesets {0} &rarr; {1}"
+msgstr "Показать выбранные наборы изменений: {0} &rarr; {1}"
+
 msgid "Selection Link"
 msgstr "Ссылка выбора"
 
@@ -1697,6 +2820,13 @@
 msgid "Expand Diff"
 msgstr "Раскрыть сравнение"
 
+msgid "No revisions"
+msgstr "Нет ревизий"
+
+msgid "Type name of user or member to grant permission"
+msgstr ""
+"Введите имя пользователя или члена группы для предоставления доступа"
+
 msgid "Failed to revoke permission"
 msgstr "Не удалось отозвать привилегии"
 
@@ -1737,7 +2867,7 @@
 msgstr "Очистить выбор"
 
 msgid "Go to tip of repository"
-msgstr "Перейти на верхушку репозитория"
+msgstr "Перейти к началу репозитория"
 
 msgid "Compare fork with %s"
 msgstr "Сравнить fork с %s"
@@ -1763,6 +2893,31 @@
 msgid "Affected %s files"
 msgstr "Затрагивает %s файлов"
 
+msgid "First (oldest) changeset in this list"
+msgstr "Первый (самый старый) набор изменений в списке"
+
+msgid "Last (most recent) changeset in this list"
+msgstr "Последний (самый свежий) набор изменений в списке"
+
+msgid "Position in this list of changesets"
+msgstr "Позиция в списке наборов изменений"
+
+msgid ""
+"Changeset status: %s by %s\n"
+"Click to open associated pull request %s"
+msgstr ""
+"Статус набора изменений: %s от %s\n"
+"Кликните, чтобы открыть соответствующий pull-запрос %s"
+
+msgid "Changeset status: %s by %s"
+msgstr "Статус набора изменений: %s от %s"
+
+msgid "Expand commit message"
+msgstr "Развернуть сообщение фиксации"
+
+msgid "%s comments"
+msgstr "%s комментариев"
+
 msgid "Bookmark %s"
 msgstr "Закладка %s"
 
@@ -1776,7 +2931,7 @@
 msgstr "%s Изменения"
 
 msgid "Changeset status"
-msgstr "Статут изменений"
+msgstr "Статус изменений"
 
 msgid "Raw diff"
 msgstr "Отобразить в формате diff"
@@ -1787,9 +2942,21 @@
 msgid "Download diff"
 msgstr "Скачать diff"
 
+msgid "Merge"
+msgstr "Слить"
+
 msgid "Grafted from:"
 msgstr "Перенесено из:"
 
+msgid "Transplanted from:"
+msgstr "Трансплантировано из:"
+
+msgid "Replaced by:"
+msgstr "Заменено:"
+
+msgid "Preceded by:"
+msgstr "Ему предшествует:"
+
 msgid "%s file changed"
 msgid_plural "%s files changed"
 msgstr[0] "%s файл изменён"
@@ -1805,18 +2972,41 @@
 msgid "Show full diff anyway"
 msgstr "Показать полный diff"
 
+msgid "comment"
+msgstr "комментарий"
+
+msgid "on pull request"
+msgstr "в pull-запросе"
+
 msgid "No title"
 msgstr "Нет заголовка"
 
+msgid "on this changeset"
+msgstr "в этом наборе изменений"
+
 msgid "Delete comment?"
 msgstr "Удалить комментарий?"
 
+msgid "Status change"
+msgstr "Изменение статуса"
+
+msgid "Comments are in plain text. Use @username to notify another user."
+msgstr ""
+"Используйте @имя_пользователя в тексте, чтобы отправить оповещение "
+"указанному пользователю."
+
 msgid "Set changeset status"
 msgstr "Изменить статус ревизии"
 
+msgid "Vote for pull request status"
+msgstr "Голосовать за статус pull-запроса"
+
 msgid "No change"
 msgstr "Без изменений"
 
+msgid "Finish pull request"
+msgstr "Завершить pull-запрос"
+
 msgid "Close"
 msgstr "Закрыть"
 
@@ -1844,6 +3034,12 @@
 msgstr[1] "%d к строкам"
 msgstr[2] "%d к строкам"
 
+msgid "%d general"
+msgid_plural "%d general"
+msgstr[0] "%d общий"
+msgstr[1] "%d общих"
+msgstr[2] "%d общих"
+
 msgid "%s Changesets"
 msgstr "%s Изменения"
 
@@ -1853,12 +3049,30 @@
 msgid "Files affected"
 msgstr "Затронутые файлы"
 
+msgid "No file before"
+msgstr "Нет предшествующего файла"
+
+msgid "File before"
+msgstr "Файл до"
+
+msgid "Modified"
+msgstr "Модифицирован"
+
 msgid "Deleted"
 msgstr "Удалён"
 
 msgid "Renamed"
 msgstr "Переименован"
 
+msgid "Unknown operation: %r"
+msgstr "Неизвестная операция: %r"
+
+msgid "No file after"
+msgstr "Нет последующего файла"
+
+msgid "File after"
+msgstr "Файл после"
+
 msgid "Show full diff for this file"
 msgstr "Показать полный diff для этого файла"
 
@@ -1866,11 +3080,24 @@
 msgstr "Показать полный diff для этого файла"
 
 msgid "Show inline comments"
-msgstr "Показать комментарии в строках"
+msgstr "Показать комментарии к строкам"
 
 msgid "No changesets"
 msgstr "Нет изменений"
 
+msgid "Criss cross merge situation with multiple merge ancestors detected!"
+msgstr "Обнаружено перекрёстное слияние с различными предками!"
+
+msgid ""
+"Please merge the target branch to your branch before creating a pull "
+"request."
+msgstr ""
+"Прежде чем создавать pull-запрос, выполните слияние целевой ветви с вашей "
+"ветвью."
+
+msgid "Merge Ancestor"
+msgstr "Предок слияния"
+
 msgid "Show merge diff"
 msgstr "Показать merge diff"
 
@@ -1886,6 +3113,15 @@
 msgid "%s Compare"
 msgstr "%s Сравнить"
 
+msgid "Compare Revisions"
+msgstr "Сравнить ревизии"
+
+msgid "Swap"
+msgstr "Поменять местами"
+
+msgid "Compare revisions, branches, bookmarks, or tags."
+msgstr "Сравнение ревизий, ветвей, закладок и тегов."
+
 msgid "Showing %s commit"
 msgid_plural "Showing %s commits"
 msgstr[0] "Показать %s commit"
@@ -1898,6 +3134,9 @@
 msgid "Public repository"
 msgstr "Публичный репозиторий"
 
+msgid "Repository creation in progress..."
+msgstr "Создание репозитория в процессе..."
+
 msgid "No changesets yet"
 msgstr "Изменений ещё не было"
 
@@ -1907,9 +3146,90 @@
 msgid "Subscribe to %s atom feed"
 msgstr "Подписаться на ленту Atom %s"
 
+msgid "Creating"
+msgstr "Создание"
+
+msgid "Mention in Comment on Changeset \"%s\""
+msgstr "Упоминание в комментарии к набору изменений «%s»"
+
+msgid "Comment on Changeset \"%s\""
+msgstr "Комментарий к набору изменений «%s»"
+
+msgid "Changeset on"
+msgstr "Набор изменений для"
+
+msgid "branch"
+msgstr "ветви"
+
+msgid "by"
+msgstr "от"
+
+msgid "Status change:"
+msgstr "Изменение статуса:"
+
+msgid "The pull request has been closed."
+msgstr "Этот pull-запрос закрыт."
+
 msgid "Hello %s"
 msgstr "Здравствуйте, %s"
 
+msgid "We have received a request to reset the password for your account."
+msgstr "Мы получили запрос на сброс пароля для вашего аккаунта."
+
+msgid ""
+"This account is however managed outside this system and the password "
+"cannot be changed here."
+msgstr ""
+"Однако, поскольку этот аккаунт управляется извне, мы не можем изменить "
+"пароль здесь."
+
+msgid "To set a new password, click the following link"
+msgstr "Перейдите по ссылке, чтобы задать новый пароль"
+
+msgid ""
+"Should you not be able to use the link above, please type the following "
+"code into the password reset form"
+msgstr ""
+"В случае, если перейти по ссылке не удаётся, введите в форме сброса "
+"пароля следующий код"
+
+msgid ""
+"If it weren't you who requested the password reset, just disregard this "
+"message."
+msgstr ""
+"Если вы не запрашивали сброс пароля, то просто проигнорируйте это "
+"сообщение."
+
+msgid "Mention on Pull Request %s \"%s\" by %s"
+msgstr "Упоминание в pull-запросе %s «%s» от %s"
+
+msgid "Added as Reviewer of Pull Request %s \"%s\" by %s"
+msgstr "Добавлен в качестве ревьювера pull-запроса %s «%s» пользователем %s"
+
+msgid "Pull request"
+msgstr "Pull-запрос"
+
+msgid "from"
+msgstr "от"
+
+msgid "to"
+msgstr "к"
+
+msgid "Mention in Comment on Pull Request %s \"%s\""
+msgstr "Упоминание в комментарии к pull-запросу %s «%s»"
+
+msgid "Pull Request %s \"%s\" Closed"
+msgstr "Pull-запрос %s «%s» закрыт"
+
+msgid "Comment on Pull Request %s \"%s\""
+msgstr "Комментарий к pull-запросу %s «%s»"
+
+msgid "Full Name"
+msgstr "Полное имя"
+
+msgid "%s File side-by-side diff"
+msgstr "Построчное сравнение файла %s"
+
 msgid "File diff"
 msgstr "Сравнение файлов"
 
@@ -1917,20 +3237,41 @@
 msgstr "Сравнение файла %s"
 
 msgid "%s Files"
-msgstr "%s Файлы"
+msgstr "%s файлов"
 
 msgid "%s Files Add"
 msgstr "%s Файлов добавлено"
 
+msgid "Add New File"
+msgstr "Добавить новый файл"
+
 msgid "Location"
 msgstr "Расположение"
 
+msgid "Enter filename..."
+msgstr "Введите имя файла..."
+
 msgid "or"
 msgstr "или"
 
+msgid "Upload File"
+msgstr "Загрузить файл"
+
+msgid "Create New File"
+msgstr "Создать новый файл"
+
+msgid "New file type"
+msgstr "Тип файла"
+
+msgid "Commit Message"
+msgstr "Сообщение фиксации"
+
 msgid "Commit Changes"
 msgstr "Применить изменения"
 
+msgid "Search File List"
+msgstr "Искать в списке файлов"
+
 msgid "Loading file list..."
 msgstr "Загружается список файлов..."
 
@@ -1946,9 +3287,24 @@
 msgid "Last Committer"
 msgstr "Автор последней ревизии"
 
+msgid "%s Files Delete"
+msgstr "Удаление файлов %s"
+
+msgid "Delete file"
+msgstr "Удалить файл"
+
+msgid "%s File Edit"
+msgstr "Правка файла %s"
+
 msgid "Edit file"
 msgstr "Редактировать файл"
 
+msgid "Show Annotation"
+msgstr "Показать аннотацию"
+
+msgid "Download as Raw"
+msgstr "Загрузить в исходном виде"
+
 msgid "Source"
 msgstr "Исходный код"
 
@@ -1958,9 +3314,35 @@
 msgstr[1] "%s авторов"
 msgstr[2] "%s автора"
 
+msgid "Diff to Revision"
+msgstr "Разница с ревизией"
+
+msgid "Show at Revision"
+msgstr "Показать в ревизии"
+
+msgid "Show Full History"
+msgstr "Показать всю историю"
+
+msgid "Show Authors"
+msgstr "Показать авторов"
+
+msgid "Show Source"
+msgstr "Показать источник"
+
+msgid "Edit on Branch: %s"
+msgstr "Правка в ветке: %s"
+
+msgid "Editing binary files not allowed"
+msgstr "Редактирование бинарных файлов не допускается"
+
 msgid "Editing files allowed only when on branch head revision"
 msgstr "Редактирование файлов разрешено только в HEAD-ревизии данной ветки"
 
+msgid "Deleting files allowed only when on branch head revision"
+msgstr "Удаление файлов разрешено только в HEAD-ревизии данной ветки"
+
 msgid "Binary file (%s)"
 msgstr "Бинарный файл (%s)"
 
@@ -1976,17 +3358,23 @@
 msgid "annotation"
 msgstr "аннотация"
 
+msgid "Go Back"
+msgstr "Назад"
+
 msgid "No files at given path"
 msgstr "По заданному пути файлы отсутствуют"
 
 msgid "%s Followers"
-msgstr "%s Наблюдатели"
+msgstr "%s Подписчики"
 
 msgid "Followers"
-msgstr "Наблюдатели"
+msgstr "Подписчики"
 
 msgid "Started following -"
-msgstr "Наблюдать за репозиторием"
+msgstr "Подписался -"
+
+msgid "Fork repository %s"
+msgstr "Создать форк репозитория %s"
 
 msgid "Fork name"
 msgstr "Имя форка"
@@ -2003,7 +3391,7 @@
 msgstr "Скопировать привилегии"
 
 msgid "Copy permissions from forked repository"
-msgstr "Скопировать привилегии с форкнутого репозитория"
+msgstr "Скопировать права доступа с форка репозитория"
 
 msgid "Update after clone"
 msgstr "Обновлять после клонирования"
@@ -2011,17 +3399,20 @@
 msgid "Checkout source after making a clone"
 msgstr "Скачивать исходники после создания клона"
 
+msgid "Fork this Repository"
+msgstr "Создать форк"
+
 msgid "%s Forks"
 msgstr "Форки %s"
 
 msgid "Forks"
-msgstr "Ответвления"
+msgstr "Форки"
 
 msgid "Forked"
-msgstr "Форкнуто"
+msgstr "Форк создан"
 
 msgid "There are no forks yet"
-msgstr "Форки ещё не созданы"
+msgstr "Форков пока нет"
 
 msgid "ATOM journal feed"
 msgstr "Лента журнала ATOM"
@@ -2047,8 +3438,11 @@
 msgid "Title"
 msgstr "Заголовок"
 
+msgid "Summarize the changes - or leave empty"
+msgstr "Опишите изменения — или оставьте пустым"
+
 msgid "Write a short description on this pull request"
-msgstr "Написать короткое писание по этому запросу"
+msgstr "Оставьте краткое описание этого pull-запроса"
 
 msgid "Changeset flow"
 msgstr "Поток изменений"
@@ -2065,9 +3459,27 @@
 msgid "No entries"
 msgstr "Записи отсуствуют"
 
+msgid "Vote"
+msgstr "Голосовать"
+
 msgid "Age"
 msgstr "Возраст"
 
+msgid "From"
+msgstr "От"
+
+msgid "To"
+msgstr "К"
+
+msgid "You voted: %s"
+msgstr "Ваш выбор: %s"
+
+msgid "You didn't vote"
+msgstr "Вы не голосовали"
+
+msgid "(no title)"
+msgstr "(без заголовка)"
+
 msgid "Closed"
 msgstr "Закрыто"
 
@@ -2075,43 +3487,132 @@
 msgstr "Удалить pull-запрос"
 
 msgid "Confirm to delete this pull request"
-msgstr "Подтвердите удаление этого pull-request'а"
+msgstr "Подтвердите удаление этого pull-запроса"
+
+msgid "Confirm again to delete this pull request with %s comments"
+msgstr ""
+"Ещё раз подтвердите удаление pull-запроса со всеми (%s) комментариями"
+
+msgid "%s Pull Request %s"
+msgstr "%s pull-запрос %s"
 
 msgid "Pull request %s from %s#%s"
 msgstr "Pull-запросы %s от %s#%s"
 
+msgid "Summarize the changes"
+msgstr "Опишите изменения"
+
+msgid "Voting Result"
+msgstr "Результаты голосования"
+
+msgid "Pull request status calculated from votes"
+msgstr "Статус pull-запроса определён по голосованию"
+
+msgid "Origin"
+msgstr "Происхождение"
+
+msgid "on"
+msgstr "на"
+
+msgid "Target"
+msgstr "Цель"
+
+msgid ""
+"This is just a range of changesets and doesn't have a target or a real "
+"merge ancestor."
+msgstr ""
+"Это всего лишь перечень наборов изменений, который не имеет цели или "
+"реального предка для слияния."
+
 msgid "Pull changes"
 msgstr "Принять изменения"
 
+msgid "Next iteration"
+msgstr "Следующая итерация"
+
+msgid "Current revision - no change"
+msgstr "Текущая ревизия — без изменений"
+
+msgid ""
+"Pull request iterations do not change content once created. Select a "
+"revision to create a new iteration."
+msgstr ""
+"Итерации pull-запросов не изменяются после создания. Выберите ревизию для "
+"создания новой итерации."
+
 msgid "Save Changes"
 msgstr "Сохранить изменения"
 
+msgid "Create New Iteration with Changes"
+msgstr "Создать итерацию с изменениями"
+
 msgid "Cancel Changes"
 msgstr "Отменить изменения"
 
+msgid "Reviewers"
+msgstr "Ревьюверы"
+
 msgid "Remove reviewer"
 msgstr "Удалить рецензента"
 
+msgid "Type name of reviewer to add"
+msgstr "Введите имя добавляемого ревьювера"
+
 msgid "Potential Reviewers"
 msgstr "Потенциальные рецензенты"
 
+msgid "Click to add the repository owner as reviewer:"
+msgstr "Нажмите, чтобы добавить владельца репозитория в качестве ревьювера:"
+
+msgid "Pull Request Content"
+msgstr "Содержимое pull-запроса"
+
 msgid "Common ancestor"
 msgstr "Общий предок"
 
 msgid "%s Pull Requests"
-msgstr "%s Запросы на внесение изменений"
+msgstr "Pull-запросы %s"
+
+msgid "Pull Requests from '%s'"
+msgstr "Pull-запросы от '%s'"
 
 msgid "Pull Requests to '%s'"
-msgstr "Pull-запросы для %s"
+msgstr "Pull-запросы для '%s'"
 
 msgid "Open New Pull Request"
 msgstr "Создать новый pull-запрос"
 
+msgid "Show Pull Requests to %s"
+msgstr "Показать pull-запросы для %s"
+
+msgid "Show Pull Requests from '%s'"
+msgstr "Показать pull-запросы от '%s'"
+
+msgid "Hide closed pull requests (only show open pull requests)"
+msgstr ""
+"Спрятать закрытые pull-запросы (показывать только открытые pull-запросы)"
+
+msgid "Show closed pull requests (in addition to open pull requests)"
+msgstr ""
+"Показывать закрытые pull-запросы (в дополнение к открытым pull-запросам)"
+
+msgid "Pull Requests Created by Me"
+msgstr "Pull-запросы, созданные мной"
+
+msgid "Pull Requests Needing My Review"
+msgstr "Pull-запросы, требующие моего рассмотрения"
+
+msgid "Pull Requests I Participate In"
+msgstr "Pull-запросы, в которых я участвую"
+
+msgid "%s Search"
+msgstr "Поиск %s"
+
 msgid "Search in All Repositories"
 msgstr "Поиск по всем репозиториям"
 
 msgid "Search term"
-msgstr "Фраза для поиска"
+msgstr "Поисковый запрос"
 
 msgid "Search in"
 msgstr "Критерий поиска"
@@ -2128,6 +3629,9 @@
 msgid "Permission denied"
 msgstr "Недостаточно прав"
 
+msgid "%s Statistics"
+msgstr "Статистика %s"
+
 msgid "%s ATOM feed"
 msgstr "ATOM лента репозитория %s"
 
@@ -2174,7 +3678,7 @@
 msgstr "%s общие сведения"
 
 msgid "Fork of"
-msgstr "Форк от"
+msgstr "Форк репозитория"
 
 msgid "Clone from"
 msgstr "Клонировать из"
@@ -2182,6 +3686,18 @@
 msgid "Clone URL"
 msgstr "Ссылка для клонирования"
 
+msgid "Use ID"
+msgstr "Использовать ID"
+
+msgid "Use SSH"
+msgstr "Использовать SSH"
+
+msgid "Use Name"
+msgstr "Использовать имя"
+
+msgid "Use HTTP"
+msgstr "Использовать HTTP"
+
 msgid "Trending files"
 msgstr "Популярные файлы"
 
@@ -2215,8 +3731,14 @@
 msgid "Add or upload files directly via Kallithea"
 msgstr "Добавить или загрузить файлы через Kallithea"
 
+msgid "Push new repository"
+msgstr "Отправить новый репозиторий"
+
 msgid "Existing repository?"
 msgstr "Существующий репозиторий?"
 
+msgid "Readme file from revision %s:%s"
+msgstr "Файл readme из ревизии %s:%s"
+
 msgid "Download %s as %s"
 msgstr "Скачать %s как %s"
--- a/kallithea/lib/annotate.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/annotate.py	Sat May 02 21:20:43 2020 +0200
@@ -25,16 +25,15 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import StringIO
-
 from pygments import highlight
 from pygments.formatters import HtmlFormatter
 
 from kallithea.lib.vcs.exceptions import VCSError
 from kallithea.lib.vcs.nodes import FileNode
+from kallithea.lib.vcs.utils import safe_str
 
 
-def annotate_highlight(filenode, annotate_from_changeset_func=None,
+def annotate_highlight(filenode, annotate_from_changeset_func,
         order=None, headers=None, **options):
     """
     Returns html portion containing annotated table with 3 columns: line
@@ -51,26 +50,26 @@
     """
     from kallithea.lib.pygmentsutils import get_custom_lexer
     options['linenos'] = True
-    formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
-        headers=headers,
-        annotate_from_changeset_func=annotate_from_changeset_func, **options)
+    formatter = AnnotateHtmlFormatter(filenode=filenode,
+        annotate_from_changeset_func=annotate_from_changeset_func, order=order,
+        headers=headers, **options)
     lexer = get_custom_lexer(filenode.extension) or filenode.lexer
-    highlighted = highlight(filenode.content, lexer, formatter)
+    highlighted = highlight(safe_str(filenode.content), lexer, formatter)
     return highlighted
 
 
 class AnnotateHtmlFormatter(HtmlFormatter):
 
-    def __init__(self, filenode, annotate_from_changeset_func=None,
+    def __init__(self, filenode, annotate_from_changeset_func,
             order=None, **options):
         """
-        If ``annotate_from_changeset_func`` is passed it should be a function
+        ``annotate_from_changeset_func`` must be a function
         which returns string from the given changeset. For example, we may pass
         following function as ``annotate_from_changeset_func``::
 
             def changeset_to_anchor(changeset):
                 return '<a href="/changesets/%s/">%s</a>\n' % \
-                       (changeset.id, changeset.id)
+                       (changeset.raw_id, changeset.raw_id)
 
         :param annotate_from_changeset_func: see above
         :param order: (default: ``['ls', 'annotate', 'code']``); order of
@@ -101,22 +100,13 @@
             raise VCSError("This formatter expect FileNode parameter, not %r"
                 % type(filenode))
 
-    def annotate_from_changeset(self, changeset):
-        """
-        Returns full html line for single changeset per annotated line.
-        """
-        if self.annotate_from_changeset_func:
-            return self.annotate_from_changeset_func(changeset)
-        else:
-            return ''.join((changeset.id, '\n'))
-
     def _wrap_tablelinenos(self, inner):
-        dummyoutfile = StringIO.StringIO()
+        inner_lines = []
         lncount = 0
         for t, line in inner:
             if t:
                 lncount += 1
-            dummyoutfile.write(line)
+            inner_lines.append(line)
 
         fl = self.linenostart
         mw = len(str(lncount + fl - 1))
@@ -166,7 +156,7 @@
 #        ln_ = len(ls.splitlines())
 #        if  ln_cs > ln_:
 #            annotate_changesets = annotate_changesets[:ln_ - ln_cs]
-        annotate = ''.join((self.annotate_from_changeset(el[2]())
+        annotate = ''.join((self.annotate_from_changeset_func(el[2]())
                             for el in self.filenode.annotate))
         # in case you wonder about the seemingly redundant <div> here:
         # since the content in the other cell also is wrapped in a div,
@@ -176,7 +166,7 @@
                   '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                   ls + '</pre></div></td>' +
                   '<td class="code">')
-        yield 0, dummyoutfile.getvalue()
+        yield 0, ''.join(inner_lines)
         yield 0, '</td></tr></table>'
 
         '''
@@ -204,5 +194,5 @@
                   ''.join(headers_row) +
                   ''.join(body_row_start)
                   )
-        yield 0, dummyoutfile.getvalue()
+        yield 0, ''.join(inner_lines)
         yield 0, '</td></tr></table>'
--- a/kallithea/lib/app_globals.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/app_globals.py	Sat May 02 21:20:43 2020 +0200
@@ -39,9 +39,7 @@
         """One instance of Globals is created during application
         initialization and is available during requests via the
         'app_globals' variable
-
         """
-        self.available_permissions = None   # propagated after init_model
 
     @property
     def cache(self):
--- a/kallithea/lib/auth.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth.py	Sat May 02 21:20:43 2020 +0200
@@ -30,6 +30,7 @@
 import os
 import string
 
+import bcrypt
 import ipaddr
 from decorator import decorator
 from sqlalchemy.orm import joinedload
@@ -38,14 +39,13 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPForbidden, HTTPFound
 
-from kallithea import __platform__, is_unix, is_windows
+import kallithea
 from kallithea.config.routing import url
-from kallithea.lib.caching_query import FromCache
-from kallithea.lib.utils import conditional_cache, get_repo_group_slug, get_repo_slug, get_user_group_slug
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.utils import get_repo_group_slug, get_repo_slug, get_user_group_slug
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
 from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.model.db import (
-    Permission, RepoGroup, Repository, User, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
+from kallithea.model.db import (Permission, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm,
+                                UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
 from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
 
@@ -87,44 +87,34 @@
 
 def get_crypt_password(password):
     """
-    Cryptographic function used for password hashing based on pybcrypt
-    or Python's own OpenSSL wrapper on windows
+    Cryptographic function used for bcrypt password hashing.
 
     :param password: password to hash
     """
-    if is_windows:
-        return hashlib.sha256(password).hexdigest()
-    elif is_unix:
-        import bcrypt
-        return bcrypt.hashpw(safe_str(password), bcrypt.gensalt(10))
-    else:
-        raise Exception('Unknown or unsupported platform %s'
-                        % __platform__)
+    return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10)))
 
 
 def check_password(password, hashed):
     """
-    Checks matching password with it's hashed value, runs different
-    implementation based on platform it runs on
+    Checks password match the hashed value using bcrypt.
+    Remains backwards compatible and accept plain sha256 hashes which used to
+    be used on Windows.
 
     :param password: password
     :param hashed: password in hashed form
     """
     # sha256 hashes will always be 64 hex chars
     # bcrypt hashes will always contain $ (and be shorter)
-    if is_windows or len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
+    if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
         return hashlib.sha256(password).hexdigest() == hashed
-    elif is_unix:
-        import bcrypt
-        try:
-            return bcrypt.checkpw(safe_str(password), safe_str(hashed))
-        except ValueError as e:
-            # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
-            log.error('error from bcrypt checking password: %s', e)
-            return False
-    else:
-        raise Exception('Unknown or unsupported platform %s'
-                        % __platform__)
+    try:
+        return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
+    except ValueError as e:
+        # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
+        log.error('error from bcrypt checking password: %s', e)
+        return False
+    log.error('check_password failed - no method found for hash length %s', len(hashed))
+    return False
 
 
 def _cached_perms_data(user_id, user_is_admin):
@@ -149,12 +139,9 @@
     #======================================================================
     # fetch default permissions
     #======================================================================
-    default_user = User.get_by_username('default', cache=True)
-    default_user_id = default_user.user_id
-
-    default_repo_perms = Permission.get_default_perms(default_user_id)
-    default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
-    default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
+    default_repo_perms = Permission.get_default_perms(kallithea.DEFAULT_USER_ID)
+    default_repo_groups_perms = Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID)
+    default_user_group_perms = Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID)
 
     if user_is_admin:
         #==================================================================
@@ -166,19 +153,19 @@
 
         # repositories
         for perm in default_repo_perms:
-            r_k = perm.UserRepoToPerm.repository.repo_name
+            r_k = perm.repository.repo_name
             p = 'repository.admin'
             permissions[RK][r_k] = p
 
         # repository groups
         for perm in default_repo_groups_perms:
-            rg_k = perm.UserRepoGroupToPerm.group.group_name
+            rg_k = perm.group.group_name
             p = 'group.admin'
             permissions[GK][rg_k] = p
 
         # user groups
         for perm in default_user_group_perms:
-            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
+            u_k = perm.user_group.users_group_name
             p = 'usergroup.admin'
             permissions[UK][u_k] = p
         return permissions
@@ -189,7 +176,7 @@
 
     # default global permissions taken from the default user
     default_global_perms = UserToPerm.query() \
-        .filter(UserToPerm.user_id == default_user_id) \
+        .filter(UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \
         .options(joinedload(UserToPerm.permission))
 
     for perm in default_global_perms:
@@ -197,27 +184,27 @@
 
     # defaults for repositories, taken from default user
     for perm in default_repo_perms:
-        r_k = perm.UserRepoToPerm.repository.repo_name
-        if perm.Repository.owner_id == user_id:
+        r_k = perm.repository.repo_name
+        if perm.repository.owner_id == user_id:
             p = 'repository.admin'
-        elif perm.Repository.private:
+        elif perm.repository.private:
             p = 'repository.none'
         else:
-            p = perm.Permission.permission_name
+            p = perm.permission.permission_name
         permissions[RK][r_k] = p
 
     # defaults for repository groups taken from default user permission
     # on given group
     for perm in default_repo_groups_perms:
-        rg_k = perm.UserRepoGroupToPerm.group.group_name
-        p = perm.Permission.permission_name
+        rg_k = perm.group.group_name
+        p = perm.permission.permission_name
         permissions[GK][rg_k] = p
 
     # defaults for user groups taken from default user permission
     # on given user group
     for perm in default_user_group_perms:
-        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
-        p = perm.Permission.permission_name
+        u_k = perm.user_group.users_group_name
+        p = perm.permission.permission_name
         permissions[UK][u_k] = p
 
     #======================================================================
@@ -271,30 +258,28 @@
 
     # user group for repositories permissions
     user_repo_perms_from_users_groups = \
-     Session().query(UserGroupRepoToPerm, Permission, Repository,) \
-        .join((Repository, UserGroupRepoToPerm.repository_id ==
-               Repository.repo_id)) \
-        .join((Permission, UserGroupRepoToPerm.permission_id ==
-               Permission.permission_id)) \
+     Session().query(UserGroupRepoToPerm) \
         .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)) \
         .filter(UserGroup.users_group_active == True) \
         .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)) \
         .filter(UserGroupMember.user_id == user_id) \
+        .options(joinedload(UserGroupRepoToPerm.repository)) \
+        .options(joinedload(UserGroupRepoToPerm.permission)) \
         .all()
 
     for perm in user_repo_perms_from_users_groups:
         bump_permission(RK,
-            perm.UserGroupRepoToPerm.repository.repo_name,
-            perm.Permission.permission_name)
+            perm.repository.repo_name,
+            perm.permission.permission_name)
 
     # user permissions for repositories
     user_repo_perms = Permission.get_default_perms(user_id)
     for perm in user_repo_perms:
         bump_permission(RK,
-            perm.UserRepoToPerm.repository.repo_name,
-            perm.Permission.permission_name)
+            perm.repository.repo_name,
+            perm.permission.permission_name)
 
     #======================================================================
     # !! PERMISSIONS FOR REPOSITORY GROUPS !!
@@ -305,59 +290,56 @@
     #======================================================================
     # user group for repo groups permissions
     user_repo_group_perms_from_users_groups = \
-     Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup) \
-     .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-     .join((Permission, UserGroupRepoGroupToPerm.permission_id
-            == Permission.permission_id)) \
+     Session().query(UserGroupRepoGroupToPerm) \
      .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
             UserGroup.users_group_id)) \
      .filter(UserGroup.users_group_active == True) \
      .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
             == UserGroupMember.users_group_id)) \
      .filter(UserGroupMember.user_id == user_id) \
+     .options(joinedload(UserGroupRepoGroupToPerm.permission)) \
      .all()
 
     for perm in user_repo_group_perms_from_users_groups:
         bump_permission(GK,
-            perm.UserGroupRepoGroupToPerm.group.group_name,
-            perm.Permission.permission_name)
+            perm.group.group_name,
+            perm.permission.permission_name)
 
     # user explicit permissions for repository groups
     user_repo_groups_perms = Permission.get_default_group_perms(user_id)
     for perm in user_repo_groups_perms:
         bump_permission(GK,
-            perm.UserRepoGroupToPerm.group.group_name,
-            perm.Permission.permission_name)
+            perm.group.group_name,
+            perm.permission.permission_name)
 
     #======================================================================
     # !! PERMISSIONS FOR USER GROUPS !!
     #======================================================================
     # user group for user group permissions
     user_group_user_groups_perms = \
-     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup) \
+     Session().query(UserGroupUserGroupToPerm) \
      .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
             == UserGroup.users_group_id)) \
-     .join((Permission, UserGroupUserGroupToPerm.permission_id
-            == Permission.permission_id)) \
      .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
             == UserGroupMember.users_group_id)) \
      .filter(UserGroupMember.user_id == user_id) \
      .join((UserGroup, UserGroupMember.users_group_id ==
             UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
      .filter(UserGroup.users_group_active == True) \
+     .options(joinedload(UserGroupUserGroupToPerm.permission)) \
      .all()
 
     for perm in user_group_user_groups_perms:
         bump_permission(UK,
-            perm.UserGroupUserGroupToPerm.target_user_group.users_group_name,
-            perm.Permission.permission_name)
+            perm.target_user_group.users_group_name,
+            perm.permission.permission_name)
 
     # user explicit permission for user groups
     user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
     for perm in user_user_groups_perms:
         bump_permission(UK,
-            perm.UserUserGroupToPerm.user_group.users_group_name,
-            perm.Permission.permission_name)
+            perm.user_group.users_group_name,
+            perm.permission.permission_name)
 
     return permissions
 
@@ -405,7 +387,7 @@
         if not dbuser.active:
             log.info('Db user %s not active', dbuser.username)
             return None
-        allowed_ips = AuthUser.get_allowed_ips(dbuser.user_id, cache=True)
+        allowed_ips = AuthUser.get_allowed_ips(dbuser.user_id)
         if not check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
             log.info('Access for %s from %s forbidden - not in %s', dbuser.username, ip_addr, allowed_ips)
             return None
@@ -414,9 +396,8 @@
     def __init__(self, user_id=None, dbuser=None, is_external_auth=False):
         self.is_external_auth = is_external_auth # container auth - don't show logout option
 
-        # These attributes will be overridden by fill_data, below, unless the
-        # requested user cannot be found and the default anonymous user is
-        # not enabled.
+        # These attributes will be overridden below if the requested user is
+        # found or anonymous access (using the default user) is enabled.
         self.user_id = None
         self.username = None
         self.api_key = None
@@ -442,7 +423,7 @@
             self.is_default_user = False
         else:
             # copy non-confidential database fields from a `db.User` to this `AuthUser`.
-            for k, v in dbuser.get_dict().iteritems():
+            for k, v in dbuser.get_dict().items():
                 assert k not in ['api_keys', 'permissions']
                 setattr(self, k, v)
             self.is_default_user = dbuser.is_default_user
@@ -450,7 +431,15 @@
 
     @LazyProperty
     def permissions(self):
-        return self.__get_perms(user=self, cache=False)
+        """
+        Fills user permission attribute with permissions taken from database
+        works for permissions given for repositories, and for permissions that
+        are granted to groups
+
+        :param user: `AuthUser` instance
+        """
+        log.debug('Getting PERMISSION tree for %s', self)
+        return _cached_perms_data(self.user_id, self.is_admin)
 
     def has_repository_permission_level(self, repo_name, level, purpose=None):
         required_perms = {
@@ -492,22 +481,6 @@
     def api_keys(self):
         return self._get_api_keys()
 
-    def __get_perms(self, user, cache=False):
-        """
-        Fills user permission attribute with permissions taken from database
-        works for permissions given for repositories, and for permissions that
-        are granted to groups
-
-        :param user: `AuthUser` instance
-        """
-        user_id = user.user_id
-        user_is_admin = user.is_admin
-
-        log.debug('Getting PERMISSION tree')
-        compute = conditional_cache('short_term', 'cache_desc',
-                                    condition=cache, func=_cached_perms_data)
-        return compute(user_id, user_is_admin)
-
     def _get_api_keys(self):
         api_keys = [self.api_key]
         for api_key in UserApiKeys.query() \
@@ -525,7 +498,7 @@
         """
         Returns list of repositories you're an admin of
         """
-        return [x[0] for x in self.permissions['repositories'].iteritems()
+        return [x[0] for x in self.permissions['repositories'].items()
                 if x[1] == 'repository.admin']
 
     @property
@@ -533,7 +506,7 @@
         """
         Returns list of repository groups you're an admin of
         """
-        return [x[0] for x in self.permissions['repositories_groups'].iteritems()
+        return [x[0] for x in self.permissions['repositories_groups'].items()
                 if x[1] == 'group.admin']
 
     @property
@@ -541,11 +514,11 @@
         """
         Returns list of user groups you're an admin of
         """
-        return [x[0] for x in self.permissions['user_groups'].iteritems()
+        return [x[0] for x in self.permissions['user_groups'].items()
                 if x[1] == 'usergroup.admin']
 
     def __repr__(self):
-        return "<AuthUser('id:%s[%s]')>" % (self.user_id, self.username)
+        return "<%s %s: %r>" % (self.__class__.__name__, self.user_id, self.username)
 
     def to_cookie(self):
         """ Serializes this login session to a cookie `dict`. """
@@ -566,14 +539,10 @@
         )
 
     @classmethod
-    def get_allowed_ips(cls, user_id, cache=False):
+    def get_allowed_ips(cls, user_id):
         _set = set()
 
-        default_ips = UserIpMap.query().filter(UserIpMap.user_id ==
-                                        User.get_default_user(cache=True).user_id)
-        if cache:
-            default_ips = default_ips.options(FromCache("sql_cache_short",
-                                              "get_user_ips_default"))
+        default_ips = UserIpMap.query().filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID)
         for ip in default_ips:
             try:
                 _set.add(ip.ip_addr)
@@ -583,9 +552,6 @@
                 pass
 
         user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
-        if cache:
-            user_ips = user_ips.options(FromCache("sql_cache_short",
-                                                  "get_user_ips_%s" % user_id))
         for ip in user_ips:
             try:
                 _set.add(ip.ip_addr)
@@ -596,24 +562,6 @@
         return _set or set(['0.0.0.0/0', '::/0'])
 
 
-def set_available_permissions(config):
-    """
-    This function will propagate globals with all available defined
-    permission given in db. We don't want to check each time from db for new
-    permissions since adding a new permission also requires application restart
-    ie. to decorate new views with the newly created permission
-
-    :param config: current config instance
-
-    """
-    log.info('getting information about all available permissions')
-    try:
-        all_perms = Session().query(Permission).all()
-        config['available_permissions'] = [x.permission_name for x in all_perms]
-    finally:
-        Session.remove()
-
-
 #==============================================================================
 # CHECK DECORATORS
 #==============================================================================
@@ -778,7 +726,7 @@
     def __init__(self, *required_perms):
         self.required_perms = required_perms # usually very short - a list is thus fine
 
-    def __nonzero__(self):
+    def __bool__(self):
         """ Defend against accidentally forgetting to call the object
             and instead evaluating it directly in a boolean context,
             which could have security implications.
@@ -835,10 +783,6 @@
         self.required_perms = set(perms)
 
     def __call__(self, authuser, repo_name, purpose=None):
-        # repo_name MUST be unicode, since we handle keys in ok
-        # dict by unicode
-        repo_name = safe_unicode(repo_name)
-
         try:
             ok = authuser.permissions['repositories'][repo_name] in self.required_perms
         except KeyError:
--- a/kallithea/lib/auth_modules/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -20,7 +20,7 @@
 import traceback
 
 from kallithea.lib.auth import AuthUser, PasswordGenerator
-from kallithea.lib.compat import formatted_json, hybrid_property
+from kallithea.lib.compat import hybrid_property
 from kallithea.lib.utils2 import str2bool
 from kallithea.model.db import Setting, User
 from kallithea.model.meta import Session
@@ -136,9 +136,6 @@
                   username)
         if username:
             user = User.get_by_username_or_email(username)
-            if user is None:
-                log.debug('Fallback to fetch user in case insensitive mode')
-                user = User.get_by_username(username, case_insensitive=True)
         else:
             log.debug('provided username:`%s` is empty skipping...', username)
         return user
@@ -286,11 +283,11 @@
         ImportError -- if we couldn't import the plugin at all
     """
     log.debug("Importing %s", plugin)
-    if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
-        parts = plugin.split(u'.lib.auth_modules.auth_', 1)
+    if not plugin.startswith('kallithea.lib.auth_modules.auth_'):
+        parts = plugin.split('.lib.auth_modules.auth_', 1)
         if len(parts) == 2:
             _module, pn = parts
-            plugin = u'kallithea.lib.auth_modules.auth_' + pn
+            plugin = 'kallithea.lib.auth_modules.auth_' + pn
     PLUGIN_CLASS_NAME = "KallitheaAuthPlugin"
     try:
         module = importlib.import_module(plugin)
@@ -309,7 +306,7 @@
                         "a subclass of %s" % (plugin, KallitheaAuthPluginBase))
 
     plugin = pluginclass()
-    if plugin.plugin_settings.im_func != KallitheaAuthPluginBase.plugin_settings.im_func:
+    if plugin.plugin_settings.__func__ != KallitheaAuthPluginBase.plugin_settings:
         raise TypeError("Authentication class %s.KallitheaAuthPluginBase "
                         "has overridden the plugin_settings method, which is "
                         "forbidden." % plugin)
@@ -351,7 +348,7 @@
             conf_key = "auth_%s_%s" % (plugin_name, v["name"])
             setting = Setting.get_by_name(conf_key)
             plugin_settings[v["name"]] = setting.app_settings_value if setting else None
-        log.debug('Settings for auth plugin %s:\n%s', plugin_name, formatted_json(plugin_settings))
+        log.debug('Settings for auth plugin %s: %s', plugin_name, plugin_settings)
 
         if not str2bool(plugin_settings["enabled"]):
             log.info("Authentication plugin %s is disabled, skipping for %s",
--- a/kallithea/lib/auth_modules/auth_container.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/auth_container.py	Sat May 02 21:20:43 2020 +0200
@@ -29,7 +29,7 @@
 
 from kallithea.lib import auth_modules
 from kallithea.lib.compat import hybrid_property
-from kallithea.lib.utils2 import safe_str, safe_unicode, str2bool
+from kallithea.lib.utils2 import str2bool
 from kallithea.model.db import Setting
 
 
@@ -180,7 +180,7 @@
         # only way to log in is using environ
         username = None
         if userobj:
-            username = safe_str(getattr(userobj, 'username'))
+            username = getattr(userobj, 'username')
 
         if not username:
             # we don't have any objects in DB, user doesn't exist, extract
@@ -199,8 +199,8 @@
 
         user_data = {
             'username': username,
-            'firstname': safe_unicode(firstname or username),
-            'lastname': safe_unicode(lastname or ''),
+            'firstname': firstname or username,
+            'lastname': lastname or '',
             'groups': [],
             'email': email or '',
             'admin': admin or False,
--- a/kallithea/lib/auth_modules/auth_crowd.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/auth_crowd.py	Sat May 02 21:20:43 2020 +0200
@@ -28,10 +28,12 @@
 
 import base64
 import logging
-import urllib2
+import urllib.parse
+import urllib.request
 
-from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property, json
+from kallithea.lib import auth_modules, ext_json
+from kallithea.lib.compat import hybrid_property
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
 
 
 log = logging.getLogger(__name__)
@@ -71,10 +73,10 @@
         self._make_opener()
 
     def _make_opener(self):
-        mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+        mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
         mgr.add_password(None, self._uri, self.user, self.passwd)
-        handler = urllib2.HTTPBasicAuthHandler(mgr)
-        self.opener = urllib2.build_opener(handler)
+        handler = urllib.request.HTTPBasicAuthHandler(mgr)
+        self.opener = urllib.request.build_opener(handler)
 
     def _request(self, url, body=None, headers=None,
                  method=None, noformat=False,
@@ -82,14 +84,12 @@
         _headers = {"Content-type": "application/json",
                     "Accept": "application/json"}
         if self.user and self.passwd:
-            authstring = base64.b64encode("%s:%s" % (self.user, self.passwd))
+            authstring = ascii_str(base64.b64encode(safe_bytes("%s:%s" % (self.user, self.passwd))))
             _headers["Authorization"] = "Basic %s" % authstring
         if headers:
             _headers.update(headers)
-        log.debug("Sent crowd: \n%s",
-                  formatted_json({"url": url, "body": body,
-                                           "headers": _headers}))
-        req = urllib2.Request(url, body, _headers)
+        log.debug("Sent to crowd at %s:\nHeaders: %s\nBody:\n%s", url, _headers, body)
+        req = urllib.request.Request(url, body, _headers)
         if method:
             req.get_method = lambda: method
 
@@ -103,7 +103,7 @@
                 rval["status"] = True
                 rval["error"] = "Response body was empty"
             elif not noformat:
-                rval = json.loads(msg)
+                rval = ext_json.loads(msg)
                 rval["status"] = True
             else:
                 rval = "".join(rdoc.readlines())
@@ -120,14 +120,14 @@
         """Authenticate a user against crowd. Returns brief information about
         the user."""
         url = ("%s/rest/usermanagement/%s/authentication?username=%s"
-               % (self._uri, self._version, urllib2.quote(username)))
-        body = json.dumps({"value": password})
+               % (self._uri, self._version, urllib.parse.quote(username)))
+        body = ascii_bytes(ext_json.dumps({"value": password}))
         return self._request(url, body)
 
     def user_groups(self, username):
         """Retrieve a list of groups to which this user belongs."""
         url = ("%s/rest/usermanagement/%s/user/group/nested?username=%s"
-               % (self._uri, self._version, urllib2.quote(username)))
+               % (self._uri, self._version, urllib.parse.quote(username)))
         return self._request(url)
 
 
@@ -209,11 +209,11 @@
             log.debug('Empty username or password skipping...')
             return None
 
-        log.debug("Crowd settings: \n%s", formatted_json(settings))
+        log.debug("Crowd settings: %s", settings)
         server = CrowdServer(**settings)
         server.set_credentials(settings["app_name"], settings["app_password"])
         crowd_user = server.user_auth(username, password)
-        log.debug("Crowd returned: \n%s", formatted_json(crowd_user))
+        log.debug("Crowd returned: %s", crowd_user)
         if not crowd_user["status"]:
             log.error('Crowd authentication as %s returned no status', username)
             return None
@@ -223,7 +223,7 @@
             return None
 
         res = server.user_groups(crowd_user["name"])
-        log.debug("Crowd groups: \n%s", formatted_json(res))
+        log.debug("Crowd groups: %s", res)
         crowd_user["groups"] = [x["name"] for x in res["groups"]]
 
         # old attrs fetched from Kallithea database
@@ -246,7 +246,7 @@
         for group in settings["admin_groups"].split(","):
             if group in user_data["groups"]:
                 user_data["admin"] = True
-        log.debug("Final crowd user object: \n%s", formatted_json(user_data))
+        log.debug("Final crowd user object: %s", user_data)
         log.info('user %s authenticated correctly', user_data['username'])
         return user_data
 
--- a/kallithea/lib/auth_modules/auth_internal.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/auth_internal.py	Sat May 02 21:20:43 2020 +0200
@@ -29,8 +29,7 @@
 import logging
 
 from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property
-from kallithea.model.db import User
+from kallithea.lib.compat import hybrid_property
 
 
 log = logging.getLogger(__name__)
@@ -77,7 +76,7 @@
             "admin": userobj.admin,
             "extern_name": userobj.user_id,
         }
-        log.debug(formatted_json(user_data))
+        log.debug('user data: %s', user_data)
 
         from kallithea.lib import auth
         password_match = auth.check_password(password, userobj.password)
--- a/kallithea/lib/auth_modules/auth_ldap.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/auth_ldap.py	Sat May 02 21:20:43 2020 +0200
@@ -31,7 +31,7 @@
 from kallithea.lib import auth_modules
 from kallithea.lib.compat import hybrid_property
 from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.utils2 import safe_str
 
 
 log = logging.getLogger(__name__)
@@ -70,11 +70,11 @@
                             port)
             for host in server.split(',')))
 
-        self.LDAP_BIND_DN = safe_str(bind_dn)
-        self.LDAP_BIND_PASS = safe_str(bind_pass)
+        self.LDAP_BIND_DN = bind_dn
+        self.LDAP_BIND_PASS = bind_pass
 
-        self.BASE_DN = safe_str(base_dn)
-        self.LDAP_FILTER = safe_str(ldap_filter)
+        self.BASE_DN = base_dn
+        self.LDAP_FILTER = ldap_filter
         self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
         self.attr_login = attr_login
 
@@ -139,7 +139,7 @@
 
                 try:
                     log.debug('Trying simple bind with %s', dn)
-                    server.simple_bind_s(dn, safe_str(password))
+                    server.simple_bind_s(dn, password)
                     results = server.search_ext_s(dn, ldap.SCOPE_BASE,
                                                   '(objectClass=*)')
                     if len(results) == 1:
@@ -328,7 +328,8 @@
             (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password)
             log.debug('Got ldap DN response %s', user_dn)
 
-            get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0]
+            def get_ldap_attr(k):
+                return safe_str(ldap_attrs.get(settings.get(k), [b''])[0])
 
             # old attrs fetched from Kallithea database
             admin = getattr(userobj, 'admin', False)
@@ -338,8 +339,8 @@
 
             user_data = {
                 'username': username,
-                'firstname': safe_unicode(get_ldap_attr('attr_firstname') or firstname),
-                'lastname': safe_unicode(get_ldap_attr('attr_lastname') or lastname),
+                'firstname': get_ldap_attr('attr_firstname') or firstname,
+                'lastname': get_ldap_attr('attr_lastname') or lastname,
                 'groups': [],
                 'email': get_ldap_attr('attr_email') or email,
                 'admin': admin,
--- a/kallithea/lib/auth_modules/auth_pam.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/auth_modules/auth_pam.py	Sat May 02 21:20:43 2020 +0200
@@ -32,7 +32,7 @@
 import time
 
 from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property
+from kallithea.lib.compat import hybrid_property
 
 
 try:
@@ -142,7 +142,7 @@
             log.warning("Cannot extract additional info for PAM user %s", username)
             pass
 
-        log.debug("pamuser: \n%s", formatted_json(user_data))
+        log.debug("pamuser: %s", user_data)
         log.info('user %s authenticated correctly', user_data['username'])
         return user_data
 
--- a/kallithea/lib/base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/base.py	Sat May 02 21:20:43 2020 +0200
@@ -28,9 +28,9 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
+import base64
 import datetime
 import logging
-import time
 import traceback
 import warnings
 
@@ -45,12 +45,11 @@
 
 from kallithea import BACKENDS, __version__
 from kallithea.config.routing import url
-from kallithea.lib import auth_modules
+from kallithea.lib import auth_modules, ext_json
 from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.compat import json
 from kallithea.lib.exceptions import UserCreationError
 from kallithea.lib.utils import get_repo_slug, is_valid_repo
-from kallithea.lib.utils2 import AttributeDict, safe_int, safe_str, safe_unicode, set_hook_environment, str2bool
+from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_str, set_hook_environment, str2bool
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
 from kallithea.model import meta
 from kallithea.model.db import PullRequest, Repository, Setting, User
@@ -97,12 +96,18 @@
     return _filter_proxy(ip)
 
 
-def _get_access_path(environ):
-    """Return PATH_INFO from environ ... using tg.original_request if available."""
+def get_path_info(environ):
+    """Return PATH_INFO from environ ... using tg.original_request if available.
+
+    In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded
+    bytes. The code points are guaranteed to only use the lower 8 bit bits, and
+    encoding the string with the 1:1 encoding latin1 will give the
+    corresponding byte string ... which then can be decoded to proper unicode.
+    """
     org_req = environ.get('tg.original_request')
     if org_req is not None:
         environ = org_req.environ
-    return environ.get('PATH_INFO')
+    return safe_str(environ['PATH_INFO'].encode('latin1'))
 
 
 def log_in_user(user, remember, is_external_auth, ip_addr):
@@ -172,7 +177,7 @@
         (authmeth, auth) = authorization.split(' ', 1)
         if 'basic' != authmeth.lower():
             return self.build_authentication(environ)
-        auth = auth.strip().decode('base64')
+        auth = safe_str(base64.b64decode(auth.strip()))
         _parts = auth.split(':', 1)
         if len(_parts) == 2:
             username, password = _parts
@@ -218,7 +223,7 @@
         Returns (None, wsgi_app) to send the wsgi_app response to the client.
         """
         # Use anonymous access if allowed for action on repo.
-        default_user = User.get_default_user(cache=True)
+        default_user = User.get_default_user()
         default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
         if default_authuser is None:
             log.debug('No anonymous access at all') # move on to proper user auth
@@ -242,7 +247,7 @@
 
         # If not authenticated by the container, running basic auth
         if not username:
-            self.authenticate.realm = safe_str(self.config['realm'])
+            self.authenticate.realm = self.config['realm']
             result = self.authenticate(environ)
             if isinstance(result, str):
                 paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
@@ -273,11 +278,8 @@
 
     def _check_permission(self, action, authuser, repo_name):
         """
-        Checks permissions using action (push/pull) user and repository
-        name
-
-        :param action: 'push' or 'pull' action
-        :param user: `User` instance
+        :param action: 'push' or 'pull'
+        :param user: `AuthUser` instance
         :param repo_name: repository name
         """
         if action == 'push':
@@ -286,7 +288,7 @@
                                                                   repo_name):
                 return False
 
-        else:
+        elif action == 'pull':
             #any other action need at least read permission
             if not HasPermissionAnyMiddleware('repository.read',
                                               'repository.write',
@@ -294,13 +296,15 @@
                                                                   repo_name):
                 return False
 
+        else:
+            assert False, action
+
         return True
 
     def _get_ip_addr(self, environ):
         return _get_ip_addr(environ)
 
     def __call__(self, environ, start_response):
-        start = time.time()
         try:
             # try parsing a request for this VCS - if it fails, call the wrapped app
             parsed_request = self.parse_request(environ)
@@ -334,7 +338,7 @@
 
             try:
                 log.info('%s action on %s repo "%s" by "%s" from %s',
-                         parsed_request.action, self.scm_alias, parsed_request.repo_name, safe_str(user.username), ip_addr)
+                         parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
                 app = self._make_app(parsed_request)
                 return app(environ, start_response)
             except Exception:
@@ -343,10 +347,6 @@
 
         except webob.exc.HTTPException as e:
             return e(environ, start_response)
-        finally:
-            log_ = logging.getLogger('kallithea.' + self.__class__.__name__)
-            log_.debug('Request time: %.3fs', time.time() - start)
-            meta.Session.remove()
 
 
 class BaseController(TGController):
@@ -413,7 +413,7 @@
         # END CONFIG VARS
 
         c.repo_name = get_repo_slug(request)  # can be empty
-        c.backends = BACKENDS.keys()
+        c.backends = list(BACKENDS)
 
         self.cut_off_limit = safe_int(config.get('cut_off_limit'))
 
@@ -454,7 +454,7 @@
                     return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)
 
         # User is default user (if active) or anonymous
-        default_user = User.get_default_user(cache=True)
+        default_user = User.get_default_user()
         authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
         if authuser is None: # fall back to anonymous
             authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
@@ -529,9 +529,9 @@
             request.ip_addr = ip_addr
             request.needs_csrf_check = needs_csrf_check
 
-            log.info('IP: %s User: %s accessed %s',
+            log.info('IP: %s User: %s Request: %s',
                 request.ip_addr, request.authuser,
-                safe_unicode(_get_access_path(environ)),
+                get_path_info(environ),
             )
             return super(BaseController, self).__call__(environ, context)
         except webob.exc.HTTPException as e:
@@ -552,13 +552,13 @@
 
     def _before(self, *args, **kwargs):
         super(BaseRepoController, self)._before(*args, **kwargs)
-        if c.repo_name:  # extracted from routes
+        if c.repo_name:  # extracted from request by base-base BaseController._before
             _dbr = Repository.get_by_repo_name(c.repo_name)
             if not _dbr:
                 return
 
             log.debug('Found repository in database %s with state `%s`',
-                      safe_unicode(_dbr), safe_unicode(_dbr.repo_state))
+                      _dbr, _dbr.repo_state)
             route = getattr(request.environ.get('routes.route'), 'name', '')
 
             # allow to delete repos that are somehow damages in filesystem
@@ -608,7 +608,7 @@
             raise webob.exc.HTTPNotFound()
         except RepositoryError as e:
             log.error(traceback.format_exc())
-            h.flash(safe_str(e), category='error')
+            h.flash(e, category='error')
             raise webob.exc.HTTPBadRequest()
 
 
@@ -634,7 +634,7 @@
         warnings.warn(msg, Warning, 2)
         log.warning(msg)
     log.debug("Returning JSON wrapped action output")
-    return json.dumps(data, encoding='utf-8')
+    return ascii_bytes(ext_json.dumps(data))
 
 @decorator.decorator
 def IfSshEnabled(func, *args, **kwargs):
--- a/kallithea/lib/caching_query.py	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,240 +0,0 @@
-"""caching_query.py
-
-Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.
-
-The three new concepts introduced here are:
-
- * CachingQuery - a Query subclass that caches and
-   retrieves results in/from Beaker.
- * FromCache - a query option that establishes caching
-   parameters on a Query
- * _params_from_query - extracts value parameters from
-   a Query.
-
-The rest of what's here are standard SQLAlchemy and
-Beaker constructs.
-
-"""
-import beaker
-from beaker.exceptions import BeakerException
-from sqlalchemy.orm.interfaces import MapperOption
-from sqlalchemy.orm.query import Query
-from sqlalchemy.sql import visitors
-
-from kallithea.lib.utils2 import safe_str
-
-
-class CachingQuery(Query):
-    """A Query subclass which optionally loads full results from a Beaker
-    cache region.
-
-    The CachingQuery stores additional state that allows it to consult
-    a Beaker cache before accessing the database:
-
-    * A "region", which is a cache region argument passed to a
-      Beaker CacheManager, specifies a particular cache configuration
-      (including backend implementation, expiration times, etc.)
-    * A "namespace", which is a qualifying name that identifies a
-      group of keys within the cache.  A query that filters on a name
-      might use the name "by_name", a query that filters on a date range
-      to a joined table might use the name "related_date_range".
-
-    When the above state is present, a Beaker cache is retrieved.
-
-    The "namespace" name is first concatenated with
-    a string composed of the individual entities and columns the Query
-    requests, i.e. such as ``Query(User.id, User.name)``.
-
-    The Beaker cache is then loaded from the cache manager based
-    on the region and composed namespace.  The key within the cache
-    itself is then constructed against the bind parameters specified
-    by this query, which are usually literals defined in the
-    WHERE clause.
-
-    The FromCache mapper option below represent
-    the "public" method of configuring this state upon the CachingQuery.
-
-    """
-
-    def __init__(self, manager, *args, **kw):
-        self.cache_manager = manager
-        Query.__init__(self, *args, **kw)
-
-    def __iter__(self):
-        """override __iter__ to pull results from Beaker
-           if particular attributes have been configured.
-
-           Note that this approach does *not* detach the loaded objects from
-           the current session. If the cache backend is an in-process cache
-           (like "memory") and lives beyond the scope of the current session's
-           transaction, those objects may be expired. The method here can be
-           modified to first expunge() each loaded item from the current
-           session before returning the list of items, so that the items
-           in the cache are not the same ones in the current Session.
-
-        """
-        if hasattr(self, '_cache_parameters'):
-            return self.get_value(createfunc=lambda:
-                                  list(Query.__iter__(self)))
-        else:
-            return Query.__iter__(self)
-
-    def invalidate(self):
-        """Invalidate the value represented by this Query."""
-
-        cache, cache_key = _get_cache_parameters(self)
-        cache.remove(cache_key)
-
-    def get_value(self, merge=True, createfunc=None):
-        """Return the value from the cache for this query.
-
-        Raise KeyError if no value present and no
-        createfunc specified.
-
-        """
-        cache, cache_key = _get_cache_parameters(self)
-        ret = cache.get_value(cache_key, createfunc=createfunc)
-        if merge:
-            ret = self.merge_result(ret, load=False)
-        return ret
-
-    def set_value(self, value):
-        """Set the value in the cache for this query."""
-
-        cache, cache_key = _get_cache_parameters(self)
-        cache.put(cache_key, value)
-
-
-def query_callable(manager, query_cls=CachingQuery):
-    def query(*arg, **kw):
-        return query_cls(manager, *arg, **kw)
-    return query
-
-
-def get_cache_region(name, region):
-    if region not in beaker.cache.cache_regions:
-        raise BeakerException('Cache region `%s` not configured '
-            'Check if proper cache settings are in the .ini files' % region)
-    kw = beaker.cache.cache_regions[region]
-    return beaker.cache.Cache._get_cache(name, kw)
-
-
-def _get_cache_parameters(query):
-    """For a query with cache_region and cache_namespace configured,
-    return the corresponding Cache instance and cache key, based
-    on this query's current criterion and parameter values.
-
-    """
-    if not hasattr(query, '_cache_parameters'):
-        raise ValueError("This Query does not have caching "
-                         "parameters configured.")
-
-    region, namespace, cache_key = query._cache_parameters
-
-    namespace = _namespace_from_query(namespace, query)
-
-    if cache_key is None:
-        # cache key - the value arguments from this query's parameters.
-        args = [safe_str(x) for x in _params_from_query(query)]
-        args.extend(filter(lambda k: k not in ['None', None, u'None'],
-                           [str(query._limit), str(query._offset)]))
-
-        cache_key = " ".join(args)
-
-    if cache_key is None:
-        raise Exception('Cache key cannot be None')
-
-    # get cache
-    #cache = query.cache_manager.get_cache_region(namespace, region)
-    cache = get_cache_region(namespace, region)
-    # optional - hash the cache_key too for consistent length
-    # import uuid
-    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
-
-    return cache, cache_key
-
-
-def _namespace_from_query(namespace, query):
-    # cache namespace - the token handed in by the
-    # option + class we're querying against
-    namespace = " ".join([namespace] + [str(x) for x in query._entities])
-
-    # memcached wants this
-    namespace = namespace.replace(' ', '_')
-
-    return namespace
-
-
-def _set_cache_parameters(query, region, namespace, cache_key):
-
-    if hasattr(query, '_cache_parameters'):
-        region, namespace, cache_key = query._cache_parameters
-        raise ValueError("This query is already configured "
-                        "for region %r namespace %r" %
-                        (region, namespace)
-                    )
-    query._cache_parameters = region, safe_str(namespace), cache_key
-
-
-class FromCache(MapperOption):
-    """Specifies that a Query should load results from a cache."""
-
-    propagate_to_loaders = False
-
-    def __init__(self, region, namespace, cache_key=None):
-        """Construct a new FromCache.
-
-        :param region: the cache region.  Should be a
-        region configured in the Beaker CacheManager.
-
-        :param namespace: the cache namespace.  Should
-        be a name uniquely describing the target Query's
-        lexical structure.
-
-        :param cache_key: optional.  A string cache key
-        that will serve as the key to the query.   Use this
-        if your query has a huge amount of parameters (such
-        as when using in_()) which correspond more simply to
-        some other identifier.
-
-        """
-        self.region = region
-        self.namespace = namespace
-        self.cache_key = cache_key
-
-    def process_query(self, query):
-        """Process a Query during normal loading operation."""
-
-        _set_cache_parameters(query, self.region, self.namespace,
-                              self.cache_key)
-
-
-def _params_from_query(query):
-    """Pull the bind parameter values from a query.
-
-    This takes into account any scalar attribute bindparam set up.
-
-    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
-    would return [5, 7].
-
-    """
-    v = []
-
-    def visit_bindparam(bind):
-        if bind.key in query._params:
-            value = query._params[bind.key]
-        elif bind.callable:
-            # lazyloader may dig a callable in here, intended
-            # to late-evaluate params after autoflush is called.
-            # convert to a scalar value.
-            value = bind.callable()
-        else:
-            value = bind.value
-
-        v.append(value)
-    if query._criterion is not None:
-        visitors.traverse(query._criterion, {}, {'bindparam': visit_bindparam})
-    for f in query._from_obj:
-        visitors.traverse(f, {}, {'bindparam': visit_bindparam})
-    return v
--- a/kallithea/lib/celerylib/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/celerylib/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -33,9 +33,9 @@
 from decorator import decorator
 from tg import config
 
-from kallithea import CELERY_EAGER, CELERY_ON
+import kallithea
 from kallithea.lib.pidlock import DaemonLock, LockHeld
-from kallithea.lib.utils2 import safe_str
+from kallithea.lib.utils2 import safe_bytes
 from kallithea.model import meta
 
 
@@ -57,10 +57,10 @@
 
 
 def task(f_org):
-    """Wrapper of celery.task.task, running async if CELERY_ON
+    """Wrapper of celery.task.task, running async if CELERY_APP
     """
 
-    if CELERY_ON:
+    if kallithea.CELERY_APP:
         def f_async(*args, **kwargs):
             log.info('executing %s task', f_org.__name__)
             try:
@@ -68,8 +68,7 @@
             finally:
                 log.info('executed %s task', f_org.__name__)
         f_async.__name__ = f_org.__name__
-        from kallithea.lib import celerypylons
-        runner = celerypylons.task(ignore_result=True)(f_async)
+        runner = kallithea.CELERY_APP.task(ignore_result=True)(f_async)
 
         def f_wrapped(*args, **kwargs):
             t = runner.apply_async(args=args, kwargs=kwargs)
@@ -95,7 +94,7 @@
     func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
 
     lockkey = 'task_%s.lock' % \
-        md5(func_name + '-' + '-'.join(map(safe_str, params))).hexdigest()
+        md5(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
     return lockkey
 
 
@@ -128,7 +127,7 @@
             ret = func(*fargs, **fkwargs)
             return ret
         finally:
-            if CELERY_ON and not CELERY_EAGER:
+            if kallithea.CELERY_APP and not kallithea.CELERY_EAGER:
                 meta.Session.remove()
 
     return decorator(__wrapper, func)
--- a/kallithea/lib/celerylib/tasks.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/celerylib/tasks.py	Sat May 02 21:20:43 2020 +0200
@@ -26,24 +26,23 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import logging
+import email.utils
 import os
-import rfc822
 import traceback
 from collections import OrderedDict
 from operator import itemgetter
 from time import mktime
 
+import celery.utils.log
 from tg import config
 
-from kallithea import CELERY_ON
-from kallithea.lib import celerylib
-from kallithea.lib.compat import json
+import kallithea
+from kallithea.lib import celerylib, ext_json
 from kallithea.lib.helpers import person
 from kallithea.lib.hooks import log_create_repository
 from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
 from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib.utils2 import ascii_bytes, str2bool
 from kallithea.lib.vcs.utils import author_email
 from kallithea.model.db import RepoGroup, Repository, Statistics, User
 
@@ -51,7 +50,7 @@
 __all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
 
 
-log = logging.getLogger(__name__)
+log = celery.utils.log.get_task_logger(__name__)
 
 
 @celerylib.task
@@ -67,6 +66,11 @@
                          .run(full_index=full_index)
 
 
+# for js data compatibility cleans the key for person from '
+def akc(k):
+    return person(k).replace('"', '')
+
+
 @celerylib.task
 @celerylib.dbsession
 def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
@@ -80,9 +84,6 @@
     try:
         lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))
 
-        # for js data compatibility cleans the key for person from '
-        akc = lambda k: person(k).replace('"', "")
-
         co_day_auth_aggr = {}
         commits_by_day_aggregate = {}
         repo = Repository.get_by_repo_name(repo_name)
@@ -118,22 +119,21 @@
             return True
 
         if cur_stats:
-            commits_by_day_aggregate = OrderedDict(json.loads(
+            commits_by_day_aggregate = OrderedDict(ext_json.loads(
                                         cur_stats.commit_activity_combined))
-            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
+            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
 
         log.debug('starting parsing %s', parse_limit)
-        lmktime = mktime
 
-        last_rev = last_rev + 1 if last_rev >= 0 else 0
+        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
         log.debug('Getting revisions from %s to %s',
              last_rev, last_rev + parse_limit
         )
         for cs in repo[last_rev:last_rev + parse_limit]:
             log.debug('parsing %s', cs)
             last_cs = cs  # remember last parsed changeset
-            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
-                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
+            tt = cs.date.timetuple()
+            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
 
             if akc(cs.author) in co_day_auth_aggr:
                 try:
@@ -143,8 +143,7 @@
                 except ValueError:
                     time_pos = None
 
-                if time_pos >= 0 and time_pos is not None:
-
+                if time_pos is not None and time_pos >= 0:
                     datadict = \
                         co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 
@@ -195,8 +194,8 @@
             }
 
         stats = cur_stats if cur_stats else Statistics()
-        stats.commit_activity = json.dumps(co_day_auth_aggr)
-        stats.commit_activity_combined = json.dumps(overview_data)
+        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
+        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
 
         log.debug('last revision %s', last_rev)
         leftovers = len(repo.revisions[last_rev:])
@@ -204,7 +203,7 @@
 
         if last_rev == 0 or leftovers < parse_limit:
             log.debug('getting code trending stats')
-            stats.languages = json.dumps(__get_codes_stats(repo_name))
+            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
 
         try:
             stats.repository = dbrepo
@@ -221,7 +220,7 @@
         lock.release()
 
         # execute another task if celery is enabled
-        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
+        if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
             get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
         elif recurse_limit <= 0:
             log.debug('Not recursing - limit has been reached')
@@ -234,7 +233,7 @@
 
 @celerylib.task
 @celerylib.dbsession
-def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
+def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
     """
     Sends an email with defined parameters from the .ini files.
 
@@ -244,7 +243,8 @@
     :param body: body of the mail
     :param html_body: html version of body
     :param headers: dictionary of prepopulated e-mail headers
-    :param author: User object of the author of this mail, if known and relevant
+    :param from_name: full name to be used as sender of this mail - often a
+    .full_name_or_username value
     """
     assert isinstance(recipients, list), recipients
     if headers is None:
@@ -276,13 +276,13 @@
     # SMTP sender
     envelope_from = email_config.get('app_email_from', 'Kallithea')
     # 'From' header
-    if author is not None:
-        # set From header based on author but with a generic e-mail address
+    if from_name is not None:
+        # set From header based on from_name but with a generic e-mail address
         # In case app_email_from is in "Some Name <e-mail>" format, we first
         # extract the e-mail address.
         envelope_addr = author_email(envelope_from)
         headers['From'] = '"%s" <%s>' % (
-            rfc822.quote('%s (no-reply)' % author.full_name_or_username),
+            email.utils.quote('%s (no-reply)' % from_name),
             envelope_addr)
 
     user = email_config.get('smtp_username')
@@ -414,7 +414,7 @@
 
     DBS = celerylib.get_session()
 
-    base_path = Repository.base_path()
+    base_path = kallithea.CONFIG['base_path']
     cur_user = User.guess_instance(cur_user)
 
     repo_name = form_data['repo_name']  # fork in this case
@@ -489,7 +489,7 @@
     for _topnode, _dirnodes, filenodes in tip.walk('/'):
         for filenode in filenodes:
             ext = filenode.extension.lower()
-            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not filenode.is_binary:
+            if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
                 if ext in code_stats:
                     code_stats[ext] += 1
                 else:
--- a/kallithea/lib/celerypylons/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/celerypylons/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -14,34 +14,64 @@
 mandatory settings.
 """
 
+import logging
+
 import celery
-import celery.result as result
 import tg
-from celery.bin import worker
-from celery.task import task
+
+import kallithea
 
 
-def celery_config(config):
-    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
+class CeleryConfig(object):
+    imports = ['kallithea.lib.celerylib.tasks']
+    task_always_eager = False
 
-    # Verify .ini file configuration has been loaded
-    assert config['celery.imports'] == 'kallithea.lib.celerylib.tasks', 'Kallithea Celery configuration has not been loaded'
+# map from Kallithea .ini Celery 3 config names to Celery 4 config names
+celery3_compat = {
+    'broker.url': 'broker_url',
+    'celery.accept.content': 'accept_content',
+    'celery.always.eager': 'task_always_eager',
+    'celery.amqp.task.result.expires': 'result_expires',
+    'celeryd.concurrency': 'worker_concurrency',
+    'celeryd.max.tasks.per.child': 'worker_max_tasks_per_child',
+    #'celery.imports' ends up unchanged
+    'celery.result.backend': 'result_backend',
+    'celery.result.serializer': 'result_serializer',
+    'celery.task.serializer': 'task_serializer',
+}
 
-    class CeleryConfig(object):
-        pass
+list_config_names = """imports accept_content""".split()
+
+
+desupported = set([
+    'celery.result.dburi',
+    'celery.result.serialier',
+    'celery.send.task.error.emails',
+])
+
+
+log = logging.getLogger(__name__)
+
+
+def make_celery_config(config):
+    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
 
     celery_config = CeleryConfig()
 
-    PREFIXES = """ADMINS BROKER CASSANDRA CELERYBEAT CELERYD CELERYMON CELERY EMAIL SERVER""".split()
-    LIST_PARAMS = """CELERY_IMPORTS ADMINS ROUTES CELERY_ACCEPT_CONTENT""".split()
-
     for config_key, config_value in sorted(config.items()):
-        celery_key = config_key.replace('.', '_').upper()
-        if celery_key.split('_', 1)[0] not in PREFIXES:
+        if config_key in desupported and config_value:
+            log.error('Celery configuration setting %r is no longer supported', config_key)
+        celery_key = celery3_compat.get(config_key)
+        parts = config_key.split('.', 1)
+        if celery_key:  # explicit Celery 3 backwards compatibility
+            pass
+        elif parts[0] == 'celery' and len(parts) == 2:  # Celery 4 config key
+            celery_key = parts[1]
+        else:
             continue
-        if not isinstance(config_value, basestring):
+        if not isinstance(config_value, str):
             continue
-        if celery_key in LIST_PARAMS:
+        if celery_key in list_config_names:
             celery_value = config_value.split()
         elif config_value.isdigit():
             celery_value = int(config_value)
@@ -53,6 +83,10 @@
     return celery_config
 
 
-# Create celery app from the TurboGears configuration file
-app = celery.Celery()
-app.config_from_object(celery_config(tg.config))
+def make_app():
+    """Create celery app from the TurboGears configuration file"""
+    app = celery.Celery()
+    celery_config = make_celery_config(tg.config)
+    kallithea.CELERY_EAGER = celery_config.task_always_eager
+    app.config_from_object(celery_config)
+    return app
--- a/kallithea/lib/colored_formatter.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/colored_formatter.py	Sat May 02 21:20:43 2020 +0200
@@ -15,7 +15,7 @@
 import logging
 
 
-BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
 
 # Sequences
 RESET_SEQ = "\033[0m"
--- a/kallithea/lib/compat.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/compat.py	Sat May 02 21:20:43 2020 +0200
@@ -29,7 +29,6 @@
 
 import functools
 import os
-import sys
 
 #==============================================================================
 # Hybrid property/method
@@ -43,15 +42,10 @@
 #==============================================================================
 # json
 #==============================================================================
-from kallithea.lib.ext_json import json
+from kallithea.lib import ext_json
 
 
-# alias for formatted json
-formatted_json = functools.partial(json.dumps, indent=4, sort_keys=True)
-
-
-
-
+formatted_json = functools.partial(ext_json.dumps, indent=4, sort_keys=True)
 
 
 #==============================================================================
@@ -68,3 +62,8 @@
 
 else:
     kill = os.kill
+
+
+# mute pyflakes "imported but unused"
+assert hybrid_property
+assert OrderedSet
--- a/kallithea/lib/db_manage.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/db_manage.py	Sat May 02 21:20:43 2020 +0200
@@ -26,8 +26,6 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
 import logging
 import os
 import sys
@@ -56,7 +54,6 @@
         self.tests = tests
         self.root = root
         self.dburi = dbconf
-        self.db_exists = False
         self.cli_args = cli_args or {}
         self.init_db(SESSION=SESSION)
 
@@ -189,7 +186,7 @@
 
                 return password
             if username is None:
-                username = raw_input('Specify admin username:')
+                username = input('Specify admin username:')
             if password is None:
                 password = get_password()
                 if not password:
@@ -198,7 +195,7 @@
                     if not password:
                         sys.exit()
             if email is None:
-                email = raw_input('Specify admin email:')
+                email = input('Specify admin email:')
             self.create_user(username, password, email, True)
         else:
             log.info('creating admin and regular test users')
@@ -294,7 +291,7 @@
         if _path is not None:
             path = _path
         elif not self.tests and not test_repo_path:
-            path = raw_input(
+            path = input(
                  'Enter a valid absolute path to store repositories. '
                  'All repositories in that path will be added automatically:'
             )
@@ -385,18 +382,18 @@
     def create_user(self, username, password, email='', admin=False):
         log.info('creating user %s', username)
         UserModel().create_or_update(username, password, email,
-                                     firstname=u'Kallithea', lastname=u'Admin',
+                                     firstname='Kallithea', lastname='Admin',
                                      active=True, admin=admin,
                                      extern_type=User.DEFAULT_AUTH_TYPE)
 
     def create_default_user(self):
         log.info('creating default user')
         # create default user for handling default permissions.
-        user = UserModel().create_or_update(username=User.DEFAULT_USER,
+        user = UserModel().create_or_update(username=User.DEFAULT_USER_NAME,
                                             password=str(uuid.uuid1())[:20],
                                             email='anonymous@kallithea-scm.org',
-                                            firstname=u'Anonymous',
-                                            lastname=u'User')
+                                            firstname='Anonymous',
+                                            lastname='User')
         # based on configuration options activate/deactivate this user which
         # controls anonymous access
         if self.cli_args.get('public_access') is False:
@@ -419,4 +416,4 @@
         permissions that are missing, and not alter already defined ones
         """
         log.info('creating default user permissions')
-        PermissionModel().create_default_permissions(user=User.DEFAULT_USER)
+        PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME)
--- a/kallithea/lib/diffs.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/diffs.py	Sat May 02 21:20:43 2020 +0200
@@ -32,7 +32,7 @@
 from tg.i18n import ugettext as _
 
 from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import VCSError
 from kallithea.lib.vcs.nodes import FileNode, SubModuleNode
@@ -216,8 +216,7 @@
         stats = (0, 0)
 
     if not html_diff:
-        submodules = filter(lambda o: isinstance(o, SubModuleNode),
-                            [filenode_new, filenode_old])
+        submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
         if submodules:
             html_diff = wrap_to_table(h.escape('Submodule %r' % submodules[0]))
         else:
@@ -235,10 +234,9 @@
     """
     # make sure we pass in default context
     context = context or 3
-    submodules = filter(lambda o: isinstance(o, SubModuleNode),
-                        [filenode_new, filenode_old])
+    submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
     if submodules:
-        return ''
+        return b''
 
     for filenode in (filenode_old, filenode_new):
         if not isinstance(filenode, FileNode):
@@ -263,7 +261,7 @@
                                      ignore_whitespace=ignore_whitespace, context=context)
     except MemoryError:
         h.flash('MemoryError: Diff is too big', category='error')
-        return ''
+        return b''
 
 
 NEW_FILENODE = 1
@@ -281,7 +279,7 @@
     mentioned in the diff together with a dict of meta information that
     can be used to render it in a HTML template.
     """
-    _diff_git_re = re.compile('^diff --git', re.MULTILINE)
+    _diff_git_re = re.compile(b'^diff --git', re.MULTILINE)
 
     def __init__(self, diff, vcs='hg', diff_limit=None, inline_diff=True):
         """
@@ -291,10 +289,10 @@
             based on that parameter cut off will be triggered, set to None
             to show full diff
         """
-        if not isinstance(diff, basestring):
-            raise Exception('Diff must be a basestring got %s instead' % type(diff))
+        if not isinstance(diff, bytes):
+            raise Exception('Diff must be bytes - got %s' % type(diff))
 
-        self._diff = diff
+        self._diff = memoryview(diff)
         self.adds = 0
         self.removes = 0
         self.diff_limit = diff_limit
@@ -317,7 +315,7 @@
                 self.limited_diff = True
                 continue
 
-            head, diff_lines = _get_header(self.vcs, buffer(self._diff, start, end - start))
+            head, diff_lines = _get_header(self.vcs, self._diff[start:end])
 
             op = None
             stats = {
@@ -399,7 +397,7 @@
                 'new_lineno': '',
                 'action':     'context',
                 'line':       msg,
-                } for _op, msg in stats['ops'].iteritems()
+                } for _op, msg in stats['ops'].items()
                   if _op not in [MOD_FILENODE]])
 
             _files.append({
@@ -420,22 +418,22 @@
             for chunk in diff_data['chunks']:
                 lineiter = iter(chunk)
                 try:
-                    peekline = lineiter.next()
+                    peekline = next(lineiter)
                     while True:
                         # find a first del line
                         while peekline['action'] != 'del':
-                            peekline = lineiter.next()
+                            peekline = next(lineiter)
                         delline = peekline
-                        peekline = lineiter.next()
+                        peekline = next(lineiter)
                         # if not followed by add, eat all following del lines
                         if peekline['action'] != 'add':
                             while peekline['action'] == 'del':
-                                peekline = lineiter.next()
+                                peekline = next(lineiter)
                             continue
                         # found an add - make sure it is the only one
                         addline = peekline
                         try:
-                            peekline = lineiter.next()
+                            peekline = next(lineiter)
                         except StopIteration:
                             # add was last line - ok
                             _highlight_inline_diff(delline, addline)
@@ -479,10 +477,10 @@
             return ' <i></i>'
         assert False
 
-    return _escape_re.sub(substitute, safe_unicode(string))
+    return _escape_re.sub(substitute, safe_str(string))
 
 
-_git_header_re = re.compile(r"""
+_git_header_re = re.compile(br"""
     ^diff[ ]--git[ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
     (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
        ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
@@ -499,7 +497,7 @@
 """, re.VERBOSE | re.MULTILINE)
 
 
-_hg_header_re = re.compile(r"""
+_hg_header_re = re.compile(br"""
     ^diff[ ]--git[ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
     (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
        ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
@@ -518,6 +516,9 @@
 """, re.VERBOSE | re.MULTILINE)
 
 
+_header_next_check = re.compile(br'''(?!@)(?!literal )(?!delta )''')
+
+
 def _get_header(vcs, diff_chunk):
     """
     Parses a Git diff for a single file (header and chunks) and returns a tuple with:
@@ -537,11 +538,11 @@
         match = _hg_header_re.match(diff_chunk)
     if match is None:
         raise Exception('diff not recognized as valid %s diff' % vcs)
-    meta_info = match.groupdict()
+    meta_info = {k: None if v is None else safe_str(v) for k, v in match.groupdict().items()}
     rest = diff_chunk[match.end():]
-    if rest and not rest.startswith('@') and not rest.startswith('literal ') and not rest.startswith('delta '):
-        raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, diff_chunk[:match.end()], rest[:1000]))
-    diff_lines = (_escaper(m.group(0)) for m in re.finditer(r'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
+    if rest and _header_next_check.match(rest):
+        raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, safe_str(bytes(diff_chunk[:match.end()])), safe_str(bytes(rest[:1000]))))
+    diff_lines = (_escaper(m.group(0)) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
     return meta_info, diff_lines
 
 
@@ -559,7 +560,7 @@
 
     chunks = []
     try:
-        line = diff_lines.next()
+        line = next(diff_lines)
 
         while True:
             lines = []
@@ -590,7 +591,7 @@
                         'line':       line,
                     })
 
-            line = diff_lines.next()
+            line = next(diff_lines)
 
             while old_line < old_end or new_line < new_end:
                 if not line:
@@ -623,7 +624,7 @@
                         'line':         line[1:],
                     })
 
-                line = diff_lines.next()
+                line = next(diff_lines)
 
                 if _newline_marker.match(line):
                     # we need to append to lines, since this is not
@@ -634,7 +635,7 @@
                         'action':       'context',
                         'line':         line,
                     })
-                    line = diff_lines.next()
+                    line = next(diff_lines)
             if old_line > old_end:
                 raise Exception('error parsing diff - more than %s "-" lines at -%s+%s' % (old_end, old_line, new_line))
             if new_line > new_end:
--- a/kallithea/lib/exceptions.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/exceptions.py	Sat May 02 21:20:43 2020 +0200
@@ -74,9 +74,8 @@
     pass
 
 
-class RepositoryCreationError(Exception):
+class HgsubversionImportError(Exception):
     pass
 
-
-class HgsubversionImportError(Exception):
+class InvalidCloneUriException(Exception):
     pass
--- a/kallithea/lib/ext_json.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/ext_json.py	Sat May 02 21:20:43 2020 +0200
@@ -1,16 +1,16 @@
 """
-Extended JSON encoder for json
+Extended JSON encoder with support for more data types
 
-json.org does not specify how date time can be represented - monkeypatch it to do something.
+json.org does not specify how date time can be represented - just encode it somehow and ignore decoding ...
 """
 
 import datetime
 import decimal
 import functools
-import json  # is re-exported after monkey patching
+import json
 
 
-__all__ = ['json']
+__all__ = ['dumps', 'dump', 'load', 'loads']
 
 
 def _is_tz_aware(value):
@@ -70,10 +70,12 @@
         try:
             return _obj_dump(obj)
         except NotImplementedError:
-            pass
+            pass  # quiet skipping of unsupported types!
         raise TypeError("%r is not JSON serializable" % (obj,))
 
 
-# monkey-patch and export JSON encoder to use custom encoding method
-json.dumps = functools.partial(json.dumps, cls=ExtendedEncoder)
-json.dump = functools.partial(json.dump, cls=ExtendedEncoder)
+dumps = functools.partial(json.dumps, cls=ExtendedEncoder)
+dump = functools.partial(json.dump, cls=ExtendedEncoder)
+# No special support for loading these types back!!!
+load = json.load
+loads = json.loads
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/feeds.py	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+kallithea.lib.feeds
+~~~~~~~~~~~~~~~~~~~
+
+Shared code for providing RSS and ATOM feeds.
+"""
+
+import datetime
+import re
+
+import mako.template
+
+
+language = 'en-us'
+ttl = "5"
+
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def rfc2822_date(date):
+    # We do this ourselves to be timezone aware, email.Utils is not tz aware.
+    if getattr(date, "tzinfo", False):
+        time_str = date.strftime('%a, %d %b %Y %H:%M:%S ')
+        offset = date.tzinfo.utcoffset(date)
+        timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
+        hour, minute = divmod(timezone, 60)
+        return time_str + "%+03d%02d" % (hour, minute)
+    else:
+        return date.strftime('%a, %d %b %Y %H:%M:%S -0000')
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def rfc3339_date(date):
+    if getattr(date, "tzinfo", False):
+        time_str = date.strftime('%Y-%m-%dT%H:%M:%S')
+        offset = date.tzinfo.utcoffset(date)
+        timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
+        hour, minute = divmod(timezone, 60)
+        return time_str + "%+03d:%02d" % (hour, minute)
+    else:
+        return date.strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def get_tag_uri(url, date):
+    "Creates a TagURI. See http://diveintomark.org/archives/2004/05/28/howto-atom-id"
+    tag = re.sub('^http://', '', url)
+    if date is not None:
+        tag = re.sub('/', ',%s:/' % date.strftime('%Y-%m-%d'), tag, 1)
+    tag = re.sub('#', '/', tag)
+    return 'tag:' + tag
+
+
+class Attributes(object):
+    """Simple namespace for attribute dict access in mako and elsewhere"""
+    def __init__(self, a_dict):
+        self.__dict__ = a_dict
+
+
+class _Feeder(object):
+
+    content_type = None
+    template = None  # subclass must provide a mako.template.Template
+
+    @classmethod
+    def render(cls, header, entries):
+        try:
+            latest_pubdate = max(
+                pubdate for pubdate in (e.get('pubdate') for e in entries)
+                if pubdate
+            )
+        except ValueError:  # max() arg is an empty sequence ... or worse
+            latest_pubdate = datetime.datetime.now()
+
+        return cls.template.render(
+            language=language,
+            ttl=ttl,  # rss only
+            latest_pubdate=latest_pubdate,
+            rfc2822_date=rfc2822_date,  # for RSS
+            rfc3339_date=rfc3339_date,  # for Atom
+            get_tag_uri=get_tag_uri,
+            entries=[Attributes(e) for e in entries],
+            **header
+        )
+
+
+class AtomFeed(_Feeder):
+
+    content_type = 'application/atom+xml'
+
+    template = mako.template.Template('''\
+<?xml version="1.0" encoding="utf-8"?>
+<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="${language}">
+  <title>${title}</title>
+  <link href="${link}" rel="alternate"></link>
+  <id>${link}</id>
+  <updated>${rfc3339_date(latest_pubdate)}</updated>
+  % for entry in entries:
+  <entry>
+    <title>${entry.title}</title>
+    <link href="${entry.link}" rel="alternate"></link>
+    <updated>${rfc3339_date(entry.pubdate)}</updated>
+    <published>${rfc3339_date(entry.pubdate)}</published>
+    <author>
+      <name>${entry.author_name}</name>
+      <email>${entry.author_email}</email>
+    </author>
+    <id>${get_tag_uri(entry.link, entry.pubdate)}</id>
+    <summary type="html">${entry.description}</summary>
+  </entry>
+  % endfor
+</feed>
+''', default_filters=['x'], output_encoding='utf-8', encoding_errors='replace')
+
+
+class RssFeed(_Feeder):
+
+    content_type = 'application/rss+xml'
+
+    template = mako.template.Template('''\
+<?xml version="1.0" encoding="utf-8"?>
+<rss version="2.0">
+  <channel>
+    <title>${title}</title>
+    <link>${link}</link>
+    <description>${description}</description>
+    <language>${language}</language>
+    <lastBuildDate>${rfc2822_date(latest_pubdate)}</lastBuildDate>
+    <ttl>${ttl}</ttl>
+    % for entry in entries:
+    <item>
+      <title>${entry.title}</title>
+      <link>${entry.link}</link>
+      <description>${entry.description}</description>
+      <author>${entry.author_email} (${entry.author_name})</author>
+      <pubDate>${rfc2822_date(entry.pubdate)}</pubDate>
+    </item>
+    % endfor
+  </channel>
+</rss>
+''', default_filters=['x'], output_encoding='utf-8', encoding_errors='replace')
--- a/kallithea/lib/helpers.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/helpers.py	Sat May 02 21:20:43 2020 +0200
@@ -22,9 +22,8 @@
 import logging
 import random
 import re
-import StringIO
 import textwrap
-import urlparse
+import urllib.parse
 
 from beaker.cache import cache_region
 from pygments import highlight as code_highlight
@@ -49,7 +48,7 @@
 from kallithea.lib.pygmentsutils import get_custom_lexer
 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
 from kallithea.lib.utils2 import age as _age
-from kallithea.lib.utils2 import credentials_filter, safe_int, safe_str, safe_unicode, str2bool, time_to_datetime
+from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 #==============================================================================
@@ -58,6 +57,25 @@
 from kallithea.lib.vcs.utils import author_email, author_name
 
 
+# mute pyflakes "imported but unused"
+assert Option
+assert checkbox
+assert end_form
+assert password
+assert radio
+assert submit
+assert text
+assert textarea
+assert format_byte_size
+assert chop_at
+assert wrap_paragraphs
+assert HasPermissionAny
+assert HasRepoGroupPermissionLevel
+assert HasRepoPermissionLevel
+assert time_to_datetime
+assert EmptyChangeset
+
+
 log = logging.getLogger(__name__)
 
 
@@ -167,7 +185,7 @@
         for x in option_list:
             if isinstance(x, tuple) and len(x) == 2:
                 value, label = x
-            elif isinstance(x, basestring):
+            elif isinstance(x, str):
                 value = label = x
             else:
                 log.error('invalid select option %r', x)
@@ -177,7 +195,7 @@
                 for x in value:
                     if isinstance(x, tuple) and len(x) == 2:
                         group_value, group_label = x
-                    elif isinstance(x, basestring):
+                    elif isinstance(x, str):
                         group_value = group_label = x
                     else:
                         log.error('invalid select option %r', x)
@@ -200,14 +218,12 @@
     :param path:
     """
 
-    return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_str(path)).hexdigest()[:12])
+    return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
 
 
 class _FilesBreadCrumbs(object):
 
     def __call__(self, repo_name, rev, paths):
-        if isinstance(paths, str):
-            paths = safe_unicode(paths)
         url_l = [link_to(repo_name, url('files_home',
                                         repo_name=repo_name,
                                         revision=rev, f_path=''),
@@ -246,12 +262,12 @@
             yield i, t
 
     def _wrap_tablelinenos(self, inner):
-        dummyoutfile = StringIO.StringIO()
+        inner_lines = []
         lncount = 0
         for t, line in inner:
             if t:
                 lncount += 1
-            dummyoutfile.write(line)
+            inner_lines.append(line)
 
         fl = self.linenostart
         mw = len(str(lncount + fl - 1))
@@ -304,7 +320,7 @@
                       '<tr><td class="linenos"><div class="linenodiv">'
                       '<pre>' + ls + '</pre></div></td>'
                       '<td id="hlcode" class="code">')
-        yield 0, dummyoutfile.getvalue()
+        yield 0, ''.join(inner_lines)
         yield 0, '</td></tr></table>'
 
 
@@ -331,7 +347,48 @@
     """
     lexer = get_custom_lexer(filenode.extension) or filenode.lexer
     return literal(markup_whitespace(
-        code_highlight(filenode.content, lexer, CodeHtmlFormatter(**kwargs))))
+        code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
+
+
+def hsv_to_rgb(h, s, v):
+    if s == 0.0:
+        return v, v, v
+    i = int(h * 6.0)  # XXX assume int() truncates!
+    f = (h * 6.0) - i
+    p = v * (1.0 - s)
+    q = v * (1.0 - s * f)
+    t = v * (1.0 - s * (1.0 - f))
+    i = i % 6
+    if i == 0:
+        return v, t, p
+    if i == 1:
+        return q, v, p
+    if i == 2:
+        return p, v, t
+    if i == 3:
+        return p, q, v
+    if i == 4:
+        return t, p, v
+    if i == 5:
+        return v, p, q
+
+
+def gen_color(n=10000):
+    """generator for getting n of evenly distributed colors using
+    hsv color and golden ratio. It always returns the same order of colors
+
+    :returns: RGB tuple
+    """
+
+    golden_ratio = 0.618033988749895
+    h = 0.22717784590367374
+
+    for _unused in range(n):
+        h += golden_ratio
+        h %= 1
+        HSV_tuple = [h, 0.95, 0.95]
+        RGB_tuple = hsv_to_rgb(*HSV_tuple)
+        yield [str(int(x * 256)) for x in RGB_tuple]
 
 
 def pygmentize_annotation(repo_name, filenode, **kwargs):
@@ -340,82 +397,38 @@
 
     :param filenode:
     """
-
+    cgenerator = gen_color()
     color_dict = {}
 
-    def gen_color(n=10000):
-        """generator for getting n of evenly distributed colors using
-        hsv color and golden ratio. It always return same order of colors
-
-        :returns: RGB tuple
-        """
-
-        def hsv_to_rgb(h, s, v):
-            if s == 0.0:
-                return v, v, v
-            i = int(h * 6.0)  # XXX assume int() truncates!
-            f = (h * 6.0) - i
-            p = v * (1.0 - s)
-            q = v * (1.0 - s * f)
-            t = v * (1.0 - s * (1.0 - f))
-            i = i % 6
-            if i == 0:
-                return v, t, p
-            if i == 1:
-                return q, v, p
-            if i == 2:
-                return p, v, t
-            if i == 3:
-                return p, q, v
-            if i == 4:
-                return t, p, v
-            if i == 5:
-                return v, p, q
-
-        golden_ratio = 0.618033988749895
-        h = 0.22717784590367374
-
-        for _unused in xrange(n):
-            h += golden_ratio
-            h %= 1
-            HSV_tuple = [h, 0.95, 0.95]
-            RGB_tuple = hsv_to_rgb(*HSV_tuple)
-            yield map(lambda x: str(int(x * 256)), RGB_tuple)
-
-    cgenerator = gen_color()
-
     def get_color_string(cs):
         if cs in color_dict:
             col = color_dict[cs]
         else:
-            col = color_dict[cs] = cgenerator.next()
+            col = color_dict[cs] = next(cgenerator)
         return "color: rgb(%s)! important;" % (', '.join(col))
 
-    def url_func(repo_name):
-
-        def _url_func(changeset):
-            author = escape(changeset.author)
-            date = changeset.date
-            message = escape(changeset.message)
-            tooltip_html = ("<b>Author:</b> %s<br/>"
-                            "<b>Date:</b> %s</b><br/>"
-                            "<b>Message:</b> %s") % (author, date, message)
+    def url_func(changeset):
+        author = escape(changeset.author)
+        date = changeset.date
+        message = escape(changeset.message)
+        tooltip_html = ("<b>Author:</b> %s<br/>"
+                        "<b>Date:</b> %s</b><br/>"
+                        "<b>Message:</b> %s") % (author, date, message)
 
-            lnk_format = show_id(changeset)
-            uri = link_to(
-                    lnk_format,
-                    url('changeset_home', repo_name=repo_name,
-                        revision=changeset.raw_id),
-                    style=get_color_string(changeset.raw_id),
-                    **{'data-toggle': 'popover',
-                       'data-content': tooltip_html}
-                  )
+        lnk_format = show_id(changeset)
+        uri = link_to(
+                lnk_format,
+                url('changeset_home', repo_name=repo_name,
+                    revision=changeset.raw_id),
+                style=get_color_string(changeset.raw_id),
+                **{'data-toggle': 'popover',
+                   'data-content': tooltip_html}
+              )
 
-            uri += '\n'
-            return uri
-        return _url_func
+        uri += '\n'
+        return uri
 
-    return literal(markup_whitespace(annotate_highlight(filenode, url_func(repo_name), **kwargs)))
+    return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
 
 
 class _Message(object):
@@ -424,22 +437,14 @@
     Converting the message to a string returns the message text. Instances
     also have the following attributes:
 
-    * ``message``: the message text.
     * ``category``: the category specified when the message was created.
+    * ``message``: the html-safe message text.
     """
 
     def __init__(self, category, message):
         self.category = category
         self.message = message
 
-    def __str__(self):
-        return self.message
-
-    __unicode__ = __str__
-
-    def __html__(self):
-        return escape(safe_unicode(self.message))
-
 
 def _session_flash_messages(append=None, clear=False):
     """Manage a message queue in tg.session: return the current message queue
@@ -461,7 +466,7 @@
     return flash_messages
 
 
-def flash(message, category=None, logf=None):
+def flash(message, category, logf=None):
     """
     Show a message to the user _and_ log it through the specified function
 
@@ -471,14 +476,22 @@
     logf defaults to log.info, unless category equals 'success', in which
     case logf defaults to log.debug.
     """
+    assert category in ('error', 'success', 'warning'), category
+    if hasattr(message, '__html__'):
+        # render to HTML for storing in cookie
+        safe_message = str(message)
+    else:
+        # Apply str - the message might be an exception with __str__
+        # Escape, so we can trust the result without further escaping, without any risk of injection
+        safe_message = html_escape(str(message))
     if logf is None:
         logf = log.info
         if category == 'success':
             logf = log.debug
 
-    logf('Flash %s: %s', category, message)
+    logf('Flash %s: %s', category, safe_message)
 
-    _session_flash_messages(append=(category, message))
+    _session_flash_messages(append=(category, safe_message))
 
 
 def pop_flash_messages():
@@ -486,14 +499,22 @@
 
     The return value is a list of ``Message`` objects.
     """
-    return [_Message(*m) for m in _session_flash_messages(clear=True)]
+    return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
 
 
-age = lambda x, y=False: _age(x, y)
-capitalize = lambda x: x.capitalize()
+def age(x, y=False):
+    return _age(x, y)
+
+def capitalize(x):
+    return x.capitalize()
+
 email = author_email
-short_id = lambda x: x[:12]
-hide_credentials = lambda x: ''.join(credentials_filter(x))
+
+def short_id(x):
+    return x[:12]
+
+def hide_credentials(x):
+    return ''.join(credentials_filter(x))
 
 
 def show_id(cs):
@@ -516,8 +537,7 @@
 
 def fmt_date(date):
     if date:
-        return date.strftime("%Y-%m-%d %H:%M:%S").decode('utf-8')
-
+        return date.strftime("%Y-%m-%d %H:%M:%S")
     return ""
 
 
@@ -548,7 +568,7 @@
     email = author_email(author)
     if email:
         from kallithea.model.db import User
-        user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short
+        user = User.get_by_email(email)
         if user is not None:
             return getattr(user, show_attr)
     return None
@@ -590,15 +610,12 @@
 
 def person_by_id(id_, show_attr="username"):
     from kallithea.model.db import User
-    # attr to return from fetched user
-    person_getter = lambda usr: getattr(usr, show_attr)
-
     # maybe it's an ID ?
     if str(id_).isdigit() or isinstance(id_, int):
         id_ = int(id_)
         user = User.get(id_)
         if user is not None:
-            return person_getter(user)
+            return getattr(user, show_attr)
     return id_
 
 
@@ -677,7 +694,7 @@
             return _op, _name
 
         revs = []
-        if len(filter(lambda v: v != '', revs_ids)) > 0:
+        if len([v for v in revs_ids if v != '']) > 0:
             repo = None
             for rev in revs_ids[:revs_top_limit]:
                 _op, _name = _get_op(rev)
@@ -850,10 +867,7 @@
             .replace('[', '<b>') \
             .replace(']', '</b>')
 
-    action_params_func = lambda: ""
-
-    if callable(action_str[1]):
-        action_params_func = action_str[1]
+    action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
 
     def action_parser_icon():
         action = user_log.action
@@ -937,13 +951,13 @@
     if email_address == _def:
         return default
 
-    parsed_url = urlparse.urlparse(url.current(qualified=True))
+    parsed_url = urllib.parse.urlparse(url.current(qualified=True))
     url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
                .replace('{email}', email_address) \
-               .replace('{md5email}', hashlib.md5(safe_str(email_address).lower()).hexdigest()) \
+               .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
                .replace('{netloc}', parsed_url.netloc) \
                .replace('{scheme}', parsed_url.scheme) \
-               .replace('{size}', safe_str(size))
+               .replace('{size}', str(size))
     return url
 
 
@@ -959,7 +973,7 @@
         suf = ''
         if len(nodes) > 30:
             suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
-        return literal(pref + '<br/> '.join([safe_unicode(x.path)
+        return literal(pref + '<br/> '.join([x.path
                                              for x in nodes[:30]]) + suf)
     else:
         return ': ' + _('No files')
@@ -1069,6 +1083,8 @@
     URLs links to what they say.
     Issues are linked to given issue-server.
     If link_ is provided, all text not already linking somewhere will link there.
+    >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
+    literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
     """
 
     def _replace(match_obj):
@@ -1162,10 +1178,11 @@
         assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
 
         # Build chain of urlify functions, starting with not doing any transformation
-        tmp_urlify_issues_f = lambda s: s
+        def tmp_urlify_issues_f(s):
+            return s
 
         issue_pat_re = re.compile(r'issue_pat(.*)')
-        for k in CONFIG.keys():
+        for k in CONFIG:
             # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
             m = issue_pat_re.match(k)
             if m is None:
@@ -1214,9 +1231,9 @@
                      'url': issue_url,
                      'text': issue_text,
                     }
-            tmp_urlify_issues_f = (lambda s,
-                                          issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f:
-                                   issue_re.sub(issues_replace, chain_f(s)))
+
+            def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
+                return issue_re.sub(issues_replace, chain_f(s))
 
         # Set tmp function globally - atomically
         _urlify_issues_f = tmp_urlify_issues_f
@@ -1229,7 +1246,7 @@
     Render plain text with revision hashes and issue references urlified
     and with @mention highlighting.
     """
-    s = safe_unicode(source)
+    s = safe_str(source)
     s = urlify_text(s, repo_name=repo_name)
     return literal('<div class="formatted-fixed">%s</div>' % s)
 
--- a/kallithea/lib/hooks.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/hooks.py	Sat May 02 21:20:43 2020 +0200
@@ -25,17 +25,17 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import binascii
 import os
 import sys
 import time
 
+import mercurial.scmutil
+
 from kallithea.lib import helpers as h
 from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils import action_logger, make_ui, setup_cache_regions
-from kallithea.lib.utils2 import HookEnvironmentError, get_hook_environment, safe_str, safe_unicode
+from kallithea.lib.utils import action_logger, make_ui
+from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
-from kallithea.lib.vcs.utils.hgcompat import revrange
 from kallithea.model.db import Repository, User
 
 
@@ -44,7 +44,7 @@
         alias += '.'
 
     size_scm, size_root = 0, 0
-    for path, dirs, files in os.walk(safe_str(root_path)):
+    for path, dirs, files in os.walk(root_path):
         if path.find(alias) != -1:
             for f in files:
                 try:
@@ -66,16 +66,16 @@
 
 
 def repo_size(ui, repo, hooktype=None, **kwargs):
-    """Presents size of repository after push"""
-    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
+    """Show size of Mercurial repository, to be called after push."""
+    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', safe_str(repo.root))
 
     last_cs = repo[len(repo) - 1]
 
     msg = ('Repository size .hg: %s Checkout: %s Total: %s\n'
            'Last revision is now r%s:%s\n') % (
-        size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
+        size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12]
     )
-    ui.status(msg)
+    ui.status(safe_bytes(msg))
 
 
 def log_pull_action(ui, repo, **kwargs):
@@ -110,8 +110,7 @@
     Note: This hook is not only logging, but also the side effect invalidating
     cahes! The function should perhaps be renamed.
     """
-    _h = binascii.hexlify
-    revs = [_h(repo[r].node()) for r in revrange(repo, [node + ':' + node_last])]
+    revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])]
     process_pushed_raw_ids(revs)
     return 0
 
@@ -303,31 +302,23 @@
     they thus need enough info to be able to create an app environment and
     connect to the database.
     """
-    from paste.deploy import appconfig
-    from sqlalchemy import engine_from_config
-    from kallithea.config.environment import load_environment
-    from kallithea.model.base import init_model
+    import paste.deploy
+    import kallithea.config.middleware
 
     extras = get_hook_environment()
-    ini_file_path = extras['config']
-    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
-    app_conf = appconfig('config:%s' % ini_file_path)
-    conf = load_environment(app_conf.global_conf, app_conf.local_conf)
 
-    setup_cache_regions(conf)
+    path_to_ini_file = extras['config']
+    kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
+    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
+    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
 
-    engine = engine_from_config(conf, 'sqlalchemy.')
-    init_model(engine)
-
-    repo_path = safe_unicode(repo_path)
     # fix if it's not a bare repo
     if repo_path.endswith(os.sep + '.git'):
         repo_path = repo_path[:-5]
 
     repo = Repository.get_by_full_path(repo_path)
     if not repo:
-        raise OSError('Repository %s not found in database'
-                      % (safe_str(repo_path)))
+        raise OSError('Repository %s not found in database' % repo_path)
 
     baseui = make_ui()
     return baseui, repo
@@ -368,19 +359,20 @@
             if push_ref['old_rev'] == EmptyChangeset().raw_id:
                 # update the symbolic ref if we push new repo
                 if scm_repo.is_empty():
-                    scm_repo._repo.refs.set_symbolic_ref('HEAD',
-                                        'refs/heads/%s' % push_ref['name'])
+                    scm_repo._repo.refs.set_symbolic_ref(
+                        b'HEAD',
+                        b'refs/heads/%s' % safe_bytes(push_ref['name']))
 
                 # build exclude list without the ref
                 cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
-                stdout, stderr = scm_repo.run_git_command(cmd)
+                stdout = scm_repo.run_git_command(cmd)
                 ref = push_ref['ref']
                 heads = [head for head in stdout.splitlines() if head != ref]
                 # now list the git revs while excluding from the list
                 cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H']
                 cmd.append('--not')
                 cmd.extend(heads) # empty list is ok
-                stdout, stderr = scm_repo.run_git_command(cmd)
+                stdout = scm_repo.run_git_command(cmd)
                 git_revs += stdout.splitlines()
 
             elif push_ref['new_rev'] == EmptyChangeset().raw_id:
@@ -389,7 +381,7 @@
             else:
                 cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
                        '--reverse', '--pretty=format:%H']
-                stdout, stderr = scm_repo.run_git_command(cmd)
+                stdout = scm_repo.run_git_command(cmd)
                 git_revs += stdout.splitlines()
 
         elif _type == 'tags':
@@ -404,5 +396,5 @@
 def rejectpush(ui, **kwargs):
     """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos"""
     ex = get_hook_environment()
-    ui.warn((b"Push access to %r denied\n") % safe_str(ex.repository))
+    ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository))
     return 1
--- a/kallithea/lib/indexers/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/indexers/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -146,7 +146,7 @@
             docnum = self.matcher.id()
             chunks = [offsets for offsets in self.get_chunks()]
             docs_id.append([docnum, chunks])
-            self.matcher.next()
+            self.matcher.next()  # this looks like a py2 iterator ... but it isn't
         return docs_id
 
     def __str__(self):
@@ -203,7 +203,7 @@
         return res
 
     def get_short_content(self, res, chunks):
-        return u''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
+        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
 
     def get_chunks(self):
         """
--- a/kallithea/lib/indexers/daemon.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/indexers/daemon.py	Sat May 02 21:20:43 2020 +0200
@@ -39,8 +39,8 @@
 
 from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
 from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA
-from kallithea.lib.utils2 import safe_str, safe_unicode
-from kallithea.lib.vcs.exceptions import ChangesetError, NodeDoesNotExistError, RepositoryError
+from kallithea.lib.utils2 import safe_str
+from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, NodeDoesNotExistError, RepositoryError
 from kallithea.model.db import Repository
 from kallithea.model.scm import ScmModel
 
@@ -77,8 +77,7 @@
 
         # filter repo list
         if repo_list:
-            # Fix non-ascii repo names to unicode
-            repo_list = map(safe_unicode, repo_list)
+            repo_list = set(repo_list)
             self.filtered_repo_paths = {}
             for repo_name, repo in self.repo_paths.items():
                 if repo_name in repo_list:
@@ -110,7 +109,7 @@
             self.initial = False
 
     def _get_index_revision(self, repo):
-        db_repo = Repository.get_by_repo_name(repo.name_unicode)
+        db_repo = Repository.get_by_repo_name(repo.name)
         landing_rev = 'tip'
         if db_repo:
             _rev_type, _rev = db_repo.landing_rev
@@ -133,7 +132,7 @@
             cs = self._get_index_changeset(repo)
             for _topnode, _dirs, files in cs.walk('/'):
                 for f in files:
-                    index_paths_.add(os.path.join(safe_str(repo.path), safe_str(f.path)))
+                    index_paths_.add(os.path.join(repo.path, f.path))
 
         except RepositoryError:
             log.debug(traceback.format_exc())
@@ -142,19 +141,16 @@
 
     def get_node(self, repo, path, index_rev=None):
         """
-        gets a filenode based on given full path. It operates on string for
-        hg git compatibility.
+        gets a filenode based on given full path.
 
         :param repo: scm repo instance
         :param path: full path including root location
         :return: FileNode
         """
         # FIXME: paths should be normalized ... or even better: don't include repo.path
-        path = safe_str(path)
-        repo_path = safe_str(repo.path)
-        assert path.startswith(repo_path)
-        assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
-        node_path = path[len(repo_path) + 1:]
+        assert path.startswith(repo.path)
+        assert path[len(repo.path)] in (os.path.sep, os.path.altsep)
+        node_path = path[len(repo.path) + 1:]
         cs = self._get_index_changeset(repo, index_rev=index_rev)
         node = cs.get_node(node_path)
         return node
@@ -182,27 +178,27 @@
 
         indexed = indexed_w_content = 0
         if self.is_indexable_node(node):
-            u_content = node.content
-            if not isinstance(u_content, unicode):
+            bytes_content = node.content
+            if b'\0' in bytes_content:
                 log.warning('    >> %s - no text content', path)
-                u_content = u''
+                u_content = ''
             else:
                 log.debug('    >> %s', path)
+                u_content = safe_str(bytes_content)
                 indexed_w_content += 1
 
         else:
             log.debug('    >> %s - not indexable', path)
             # just index file name without it's content
-            u_content = u''
+            u_content = ''
             indexed += 1
 
-        p = safe_unicode(path)
         writer.add_document(
-            fileid=p,
-            owner=unicode(repo.contact),
-            repository_rawname=safe_unicode(repo_name),
-            repository=safe_unicode(repo_name),
-            path=p,
+            fileid=path,
+            owner=repo.contact,
+            repository_rawname=repo_name,
+            repository=repo_name,
+            path=path,
             content=u_content,
             modtime=self.get_node_mtime(node),
             extension=node.extension
@@ -237,18 +233,18 @@
             indexed += 1
             log.debug('    >> %s %s/%s', cs, indexed, total)
             writer.add_document(
-                raw_id=unicode(cs.raw_id),
-                owner=unicode(repo.contact),
+                raw_id=cs.raw_id,
+                owner=repo.contact,
                 date=cs._timestamp,
-                repository_rawname=safe_unicode(repo_name),
-                repository=safe_unicode(repo_name),
+                repository_rawname=repo_name,
+                repository=repo_name,
                 author=cs.author,
                 message=cs.message,
                 last=cs.last,
-                added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
-                removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
-                changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
-                parents=u' '.join([cs.raw_id for cs in cs.parents]),
+                added=' '.join(node.path for node in cs.added).lower(),
+                removed=' '.join(node.path for node in cs.removed).lower(),
+                changed=' '.join(node.path for node in cs.changed).lower(),
+                parents=' '.join(cs.raw_id for cs in cs.parents),
             )
 
         return indexed
@@ -291,7 +287,7 @@
                         continue
 
                     qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
-                    q = qp.parse(u"last:t AND %s" % repo_name)
+                    q = qp.parse("last:t AND %s" % repo_name)
 
                     results = searcher.search(q)
 
@@ -303,14 +299,18 @@
                         # assuming that there is only one result, if not this
                         # may require a full re-index.
                         start_id = results[0]['raw_id']
-                        last_rev = repo.get_changeset(revision=start_id).revision
+                        try:
+                            last_rev = repo.get_changeset(revision=start_id).revision
+                        except ChangesetDoesNotExistError:
+                            log.error('previous last revision %s not found - indexing from scratch', start_id)
+                            start_id = None
 
                     # there are new changesets to index or a new repo to index
                     if last_rev == 0 or num_of_revs > last_rev + 1:
                         # delete the docs in the index for the previous
                         # last changeset(s)
                         for hit in results:
-                            q = qp.parse(u"last:t AND %s AND raw_id:%s" %
+                            q = qp.parse("last:t AND %s AND raw_id:%s" %
                                             (repo_name, hit['raw_id']))
                             writer.delete_by_query(q)
 
@@ -330,8 +330,8 @@
                     log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX<<')
 
     def update_file_index(self):
-        log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
-                   'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys()))
+        log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
+                  'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
 
         idx = open_dir(self.index_location, indexname=self.indexname)
         # The set of all paths in the index
@@ -390,9 +390,7 @@
                 ri_cnt = 0   # indexed
                 riwc_cnt = 0  # indexed with content
                 for path in self.get_paths(repo):
-                    path = safe_unicode(path)
                     if path in to_index or path not in indexed_paths:
-
                         # This is either a file that's changed, or a new file
                         # that wasn't indexed before. So index it!
                         i, iwc = self.add_doc(writer, path, repo, repo_name)
@@ -431,7 +429,7 @@
         file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
         file_idx_writer = file_idx.writer()
         log.debug('BUILDING INDEX FOR EXTENSIONS %s '
-                  'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys()))
+                  'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
 
         for repo_name, repo in sorted(self.repo_paths.items()):
             log.debug('Updating indices for repo %s', repo_name)
--- a/kallithea/lib/inifile.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/inifile.py	Sat May 02 21:20:43 2020 +0200
@@ -42,6 +42,10 @@
     'uuid': lambda: 'VERY-SECRET',
 }
 
+variable_options = {
+    'database_engine': ['sqlite', 'postgres', 'mysql'],
+    'http_server': ['waitress', 'gearbox', 'gevent', 'gunicorn', 'uwsgi'],
+}
 
 def expand(template, mako_variable_values, settings):
     """Expand mako template and tweak it.
@@ -63,16 +67,27 @@
     ... %elif conditional_options == 'option-b':
     ... some_variable = "never mind - option-b will not be used anyway ..."
     ... %endif
+    ...
+    ... [comment-section]
+    ... #variable3 = 3.0
+    ... #variable4 = 4.0
+    ... #variable5 = 5.0
+    ... variable5 = 5.1
+    ... #variable6 = 6.0
+    ... #variable6 = 6.1
+    ... #variable7 = 7.0
+    ... variable7 = 7.1
     ... '''
-    >>> selected_mako_conditionals = []
     >>> mako_variable_values = {'mako_variable': 'VALUE', 'mako_function': (lambda: 'FUNCTION RESULT'),
-    ...                         'conditional_options': 'option-a'}
+    ...                         'conditional_options': 'option-a', 'http_server': 'nc'}
     >>> settings = { # only partially used
     ...     '[first-section]': {'variable2': 'VAL2', 'first_extra': 'EXTRA'},
+    ...     '[comment-section]': {'variable3': '3.0', 'variable4': '4.1', 'variable5': '5.2', 'variable6': '6.2', 'variable7': '7.0'},
     ...     '[third-section]': {'third_extra': ' 3'},
     ...     '[fourth-section]': {'fourth_extra': '4', 'fourth': '"four"'},
     ... }
-    >>> print expand(template, mako_variable_values, settings)
+    >>> print(expand(template, mako_variable_values, settings))
+    ERROR: http_server is 'nc' - it should be one of 'waitress', 'gearbox', 'gevent', 'gunicorn', 'uwsgi'
     <BLANKLINE>
     [first-section]
     <BLANKLINE>
@@ -87,6 +102,19 @@
     [second-section]
     # option a was chosen
     <BLANKLINE>
+    [comment-section]
+    variable3 = 3.0
+    #variable4 = 4.0
+    variable4 = 4.1
+    #variable5 = 5.0
+    #variable5 = 5.1
+    variable5 = 5.2
+    #variable6 = 6.0
+    #variable6 = 6.1
+    variable6 = 6.2
+    variable7 = 7.0
+    #variable7 = 7.1
+    <BLANKLINE>
     [fourth-section]
     fourth = "four"
     fourth_extra = 4
@@ -99,6 +127,12 @@
     mako_variables.update(mako_variable_values or {})
     settings = dict((k, dict(v)) for k, v in settings.items()) # deep copy before mutating
 
+    for key, value in mako_variables.items():
+        if key in variable_options:
+            if value not in variable_options[key]:
+                print('ERROR: %s is %r - it should be one of %s' %
+                      (key, value, ', '.join(repr(x) for x in variable_options[key])))
+
     ini_lines = mako.template.Template(template).render(**mako_variables)
 
     def process_section(m):
@@ -106,20 +140,44 @@
         sectionname, lines = m.groups()
         if sectionname in settings:
             section_settings = settings.pop(sectionname)
+            add_after_key_value = {}  # map key to value it should be added after
 
-            def process_line(m):
-                """process a section line and update value if necessary"""
-                key, value = m.groups()
+            # 1st pass:
+            # comment out lines with keys that have new values
+            # find best line for keeping or un-commenting (because it has the right value) or adding after (because it is the last with other value)
+            def comment_out(m):
+                """process a section line if in section_settings and comment out and track in add_after_key_value"""
                 line = m.group(0)
-                if key in section_settings:
-                    new_line = '%s = %s' % (key, section_settings.pop(key))
-                    if new_line != line:
-                        # keep old entry as example - comments might refer to it
-                        line = '#%s\n%s' % (line, new_line)
-                return line.rstrip()
+                comment, key, line_value = m.groups()
+                if key not in section_settings:
+                    return line
+                new_value = section_settings[key]
+                if line_value == new_value or add_after_key_value.get(key) != new_value:
+                    add_after_key_value[key] = line_value
+                if comment:
+                    return line
+                return '#' + line
+
+            lines = re.sub(r'^(#)?([^#\n\s]*)[ \t]*=[ \t]*(.*)$', comment_out, lines, flags=re.MULTILINE)
 
-            # process lines that not are comments or empty and look like name=value
-            lines = re.sub(r'^([^#\n\s]*)[ \t]*=[ \t]*(.*)$', process_line, lines, flags=re.MULTILINE)
+            def add_after_comment(m):
+                """process a section comment line and add new value"""
+                line = m.group(0)
+                key, line_value = m.groups()
+                if key not in section_settings:
+                    return line
+                if line_value != add_after_key_value.get(key):
+                    return line
+                new_value = section_settings[key]
+                if new_value == line_value:
+                    line = line.lstrip('#')
+                else:
+                    line += '\n%s = %s' % (key, new_value)
+                section_settings.pop(key)
+                return line
+
+            lines = re.sub(r'^#([^#\n\s]*)[ \t]*=[ \t]*(.*)$', add_after_comment, lines, flags=re.MULTILINE)
+
             # add unused section settings
             if section_settings:
                 lines += '\n' + ''.join('%s = %s\n' % (key, value) for key, value in sorted(section_settings.items()))
--- a/kallithea/lib/locale.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/locale.py	Sat May 02 21:20:43 2020 +0200
@@ -24,7 +24,7 @@
     Note: UTF-8 is preferred, but for example ISO-8859-1 or mbcs should also
     work under the right circumstances."""
     try:
-        u'\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
+        '\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
     except UnicodeEncodeError:
         log.error("Cannot encode Unicode paths to file system encoding %r", sys.getfilesystemencoding())
         for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
--- a/kallithea/lib/markup_renderer.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/markup_renderer.py	Sat May 02 21:20:43 2020 +0200
@@ -33,7 +33,7 @@
 import bleach
 import markdown as markdown_mod
 
-from kallithea.lib.utils2 import MENTIONS_REGEX, safe_unicode
+from kallithea.lib.utils2 import MENTIONS_REGEX, safe_str
 
 
 log = logging.getLogger(__name__)
@@ -119,17 +119,17 @@
         At last it will just do a simple html replacing new lines with <br/>
 
         >>> MarkupRenderer.render('''<img id="a" style="margin-top:-1000px;color:red" src="http://example.com/test.jpg">''', '.md')
-        u'<p><img id="a" src="http://example.com/test.jpg" style="color: red;"></p>'
+        '<p><img id="a" src="http://example.com/test.jpg" style="color: red;"></p>'
         >>> MarkupRenderer.render('''<img class="c d" src="file://localhost/test.jpg">''', 'b.mkd')
-        u'<p><img class="c d"></p>'
+        '<p><img class="c d"></p>'
         >>> MarkupRenderer.render('''<a href="foo">foo</a>''', 'c.mkdn')
-        u'<p><a href="foo">foo</a></p>'
+        '<p><a href="foo">foo</a></p>'
         >>> MarkupRenderer.render('''<script>alert(1)</script>''', 'd.mdown')
-        u'&lt;script&gt;alert(1)&lt;/script&gt;'
+        '&lt;script&gt;alert(1)&lt;/script&gt;'
         >>> MarkupRenderer.render('''<div onclick="alert(2)">yo</div>''', 'markdown')
-        u'<div>yo</div>'
+        '<div>yo</div>'
         >>> MarkupRenderer.render('''<a href="javascript:alert(3)">yo</a>''', 'md')
-        u'<p><a>yo</a></p>'
+        '<p><a>yo</a></p>'
         """
 
         renderer = cls._detect_renderer(source, filename)
@@ -150,7 +150,11 @@
 
     @classmethod
     def plain(cls, source, universal_newline=True):
-        source = safe_unicode(source)
+        """
+        >>> MarkupRenderer.plain('https://example.com/')
+        '<br /><a href="https://example.com/">https://example.com/</a>'
+        """
+        source = safe_str(source)
         if universal_newline:
             newline = '\n'
             source = newline.join(source.splitlines())
@@ -168,30 +172,30 @@
         with "safe" fall-back to plaintext. Output from this method should be sanitized before use.
 
         >>> MarkupRenderer.markdown('''<img id="a" style="margin-top:-1000px;color:red" src="http://example.com/test.jpg">''')
-        u'<p><img id="a" style="margin-top:-1000px;color:red" src="http://example.com/test.jpg"></p>'
+        '<p><img id="a" style="margin-top:-1000px;color:red" src="http://example.com/test.jpg"></p>'
         >>> MarkupRenderer.markdown('''<img class="c d" src="file://localhost/test.jpg">''')
-        u'<p><img class="c d" src="file://localhost/test.jpg"></p>'
+        '<p><img class="c d" src="file://localhost/test.jpg"></p>'
         >>> MarkupRenderer.markdown('''<a href="foo">foo</a>''')
-        u'<p><a href="foo">foo</a></p>'
+        '<p><a href="foo">foo</a></p>'
         >>> MarkupRenderer.markdown('''<script>alert(1)</script>''')
-        u'<script>alert(1)</script>'
+        '<script>alert(1)</script>'
         >>> MarkupRenderer.markdown('''<div onclick="alert(2)">yo</div>''')
-        u'<div onclick="alert(2)">yo</div>'
+        '<div onclick="alert(2)">yo</div>'
         >>> MarkupRenderer.markdown('''<a href="javascript:alert(3)">yo</a>''')
-        u'<p><a href="javascript:alert(3)">yo</a></p>'
+        '<p><a href="javascript:alert(3)">yo</a></p>'
         >>> MarkupRenderer.markdown('''## Foo''')
-        u'<h2>Foo</h2>'
-        >>> print MarkupRenderer.markdown('''
+        '<h2>Foo</h2>'
+        >>> print(MarkupRenderer.markdown('''
         ...     #!/bin/bash
         ...     echo "hello"
-        ... ''')
+        ... '''))
         <table class="code-highlighttable"><tr><td class="linenos"><div class="linenodiv"><pre>1
         2</pre></div></td><td class="code"><div class="code-highlight"><pre><span></span><span class="ch">#!/bin/bash</span>
         <span class="nb">echo</span> <span class="s2">&quot;hello&quot;</span>
         </pre></div>
         </td></tr></table>
         """
-        source = safe_unicode(source)
+        source = safe_str(source)
         try:
             if flavored:
                 source = cls._flavored_markdown(source)
@@ -209,7 +213,7 @@
 
     @classmethod
     def rst(cls, source, safe=True):
-        source = safe_unicode(source)
+        source = safe_str(source)
         try:
             from docutils.core import publish_parts
             from docutils.parsers.rst import directives
@@ -219,7 +223,7 @@
             docutils_settings.update({'input_encoding': 'unicode',
                                       'report_level': 4})
 
-            for k, v in docutils_settings.iteritems():
+            for k, v in docutils_settings.items():
                 directives.register_directive(k, v)
 
             parts = publish_parts(source=source,
--- a/kallithea/lib/middleware/permanent_repo_url.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/middleware/permanent_repo_url.py	Sat May 02 21:20:43 2020 +0200
@@ -20,7 +20,8 @@
 """
 
 
-from kallithea.lib.utils import fix_repo_id_name, safe_str
+from kallithea.lib.utils import fix_repo_id_name
+from kallithea.lib.utils2 import safe_bytes, safe_str
 
 
 class PermanentRepoUrl(object):
@@ -30,9 +31,11 @@
         self.config = config
 
     def __call__(self, environ, start_response):
-        path_info = environ['PATH_INFO']
+        # Extract path_info as get_path_info does, but do it explicitly because
+        # we also have to do the reverse operation when patching it back in
+        path_info = safe_str(environ['PATH_INFO'].encode('latin1'))
         if path_info.startswith('/'): # it must
-            path_info = '/' + safe_str(fix_repo_id_name(path_info[1:]))
-            environ['PATH_INFO'] = path_info
+            path_info = '/' + fix_repo_id_name(path_info[1:])
+            environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1')
 
         return self.application(environ, start_response)
--- a/kallithea/lib/middleware/pygrack.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/middleware/pygrack.py	Sat May 02 21:20:43 2020 +0200
@@ -33,7 +33,7 @@
 from webob import Request, Response, exc
 
 import kallithea
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import ascii_bytes
 from kallithea.lib.vcs import subprocessio
 
 
@@ -87,7 +87,6 @@
 
         :param path:
         """
-        path = safe_unicode(path)
         assert path.startswith('/' + self.repo_name + '/')
         return path[len(self.repo_name) + 2:].strip('/')
 
@@ -113,14 +112,14 @@
         #                     ref_list
         #                     "0000"
         server_advert = '# service=%s\n' % git_command
-        packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
+        packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
         _git_path = kallithea.CONFIG.get('git_path', 'git')
         cmd = [_git_path, git_command[4:],
                '--stateless-rpc', '--advertise-refs', self.content_path]
         log.debug('handling cmd %s', cmd)
         try:
             out = subprocessio.SubprocessIOChunker(cmd,
-                starting_values=[packet_len + server_advert + '0000']
+                starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
             )
         except EnvironmentError as e:
             log.error(traceback.format_exc())
@@ -166,7 +165,7 @@
             log.error(traceback.format_exc())
             raise exc.HTTPExpectationFailed()
 
-        if git_command in [u'git-receive-pack']:
+        if git_command in ['git-receive-pack']:
             # updating refs manually after each push.
             # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
             from kallithea.lib.vcs import get_repo
@@ -186,7 +185,7 @@
         _path = self._get_fixedpath(req.path_info)
         if _path.startswith('info/refs'):
             app = self.inforefs
-        elif [a for a in self.valid_accepts if a in req.accept]:
+        elif req.accept.acceptable_offers(self.valid_accepts):
             app = self.backend
         try:
             resp = app(req, environ)
--- a/kallithea/lib/middleware/sessionmiddleware.py	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.sessionmiddleware
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-session management middleware
-
-This file overrides Beaker's built-in SessionMiddleware
-class to automagically use secure cookies over HTTPS.
-
-Original Beaker SessionMiddleware class written by Ben Bangert
-"""
-
-from beaker.middleware import SessionMiddleware
-from beaker.session import SessionObject
-
-
-class SecureSessionMiddleware(SessionMiddleware):
-    def __call__(self, environ, start_response):
-        """
-        This function's implementation is taken directly from Beaker,
-        with HTTPS detection added. When accessed over HTTPS, force
-        setting cookie's secure flag.
-
-        The only difference from that original code is that we switch
-        the secure option on and off depending on the URL scheme (first
-        two lines). To avoid concurrency issues, we use a local options
-        variable.
-        """
-        options = dict(self.options)
-        options["secure"] = environ['wsgi.url_scheme'] == 'https'
-
-        session = SessionObject(environ, **options)
-        if environ.get('paste.registry'):
-            if environ['paste.registry'].reglist:
-                environ['paste.registry'].register(self.session, session)
-        environ[self.environ_key] = session
-        environ['beaker.get_session'] = self._get_session
-
-        if 'paste.testing_variables' in environ and 'webtest_varname' in options:
-            environ['paste.testing_variables'][options['webtest_varname']] = session
-
-        def session_start_response(status, headers, exc_info=None):
-            if session.accessed():
-                session.persist()
-                if session.__dict__['_headers']['set_cookie']:
-                    cookie = session.__dict__['_headers']['cookie_out']
-                    if cookie:
-                        headers.append(('Set-cookie', cookie))
-            return start_response(status, headers, exc_info)
-        return self.wrap_app(environ, session_start_response)
--- a/kallithea/lib/middleware/simplegit.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/middleware/simplegit.py	Sat May 02 21:20:43 2020 +0200
@@ -31,11 +31,10 @@
 import logging
 import re
 
-from kallithea.lib.base import BaseVCSController
+from kallithea.lib.base import BaseVCSController, get_path_info
 from kallithea.lib.hooks import log_pull_action
 from kallithea.lib.middleware.pygrack import make_wsgi_app
 from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_unicode
 from kallithea.model.db import Repository
 
 
@@ -57,14 +56,14 @@
 
     @classmethod
     def parse_request(cls, environ):
-        path_info = environ.get('PATH_INFO', '')
+        path_info = get_path_info(environ)
         m = GIT_PROTO_PAT.match(path_info)
         if m is None:
             return None
 
         class parsed_request(object):
             # See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol
-            repo_name = safe_unicode(m.group(1).rstrip('/'))
+            repo_name = m.group(1).rstrip('/')
             cmd = m.group(2)
 
             query_string = environ['QUERY_STRING']
--- a/kallithea/lib/middleware/simplehg.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/middleware/simplehg.py	Sat May 02 21:20:43 2020 +0200
@@ -30,12 +30,13 @@
 
 import logging
 import os
-import urllib
+import urllib.parse
+
+import mercurial.hgweb
 
-from kallithea.lib.base import BaseVCSController
+from kallithea.lib.base import BaseVCSController, get_path_info
 from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import hgweb_mod
+from kallithea.lib.utils2 import safe_bytes
 
 
 log = logging.getLogger(__name__)
@@ -99,12 +100,12 @@
         http_accept = environ.get('HTTP_ACCEPT', '')
         if not http_accept.startswith('application/mercurial'):
             return None
-        path_info = environ.get('PATH_INFO', '')
+        path_info = get_path_info(environ)
         if not path_info.startswith('/'): # it must!
             return None
 
         class parsed_request(object):
-            repo_name = safe_unicode(path_info[1:].rstrip('/'))
+            repo_name = path_info[1:].rstrip('/')
 
             query_string = environ['QUERY_STRING']
 
@@ -120,7 +121,7 @@
                             break
                         action = 'pull'
                         for cmd_arg in hgarg[5:].split(';'):
-                            cmd, _args = urllib.unquote_plus(cmd_arg).split(' ', 1)
+                            cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
                             op = cmd_mapping.get(cmd, 'push')
                             if op != 'pull':
                                 assert op == 'push'
@@ -136,13 +137,13 @@
         """
         Make an hgweb wsgi application.
         """
-        str_repo_name = safe_str(parsed_request.repo_name)
-        repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
+        repo_name = parsed_request.repo_name
+        repo_path = os.path.join(self.basepath, repo_name)
         baseui = make_ui(repo_path=repo_path)
-        hgweb_app = hgweb_mod.hgweb(repo_path, name=str_repo_name, baseui=baseui)
+        hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
 
         def wrapper_app(environ, start_response):
-            environ['REPO_NAME'] = str_repo_name # used by hgweb_mod.hgweb
+            environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb
             return hgweb_app(environ, start_response)
 
         return wrapper_app
--- a/kallithea/lib/middleware/wrapper.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/middleware/wrapper.py	Sat May 02 21:20:43 2020 +0200
@@ -29,8 +29,7 @@
 import logging
 import time
 
-from kallithea.lib.base import _get_access_path, _get_ip_addr
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.base import _get_ip_addr, get_path_info
 
 
 log = logging.getLogger(__name__)
@@ -41,12 +40,14 @@
     def __init__(self, start_response):
         self._start_response = start_response
         self._start = time.time()
+        self.status = None
         self._size = 0
 
     def duration(self):
         return time.time() - self._start
 
     def start_response(self, status, response_headers, exc_info=None):
+        self.status = status
         write = self._start_response(status, response_headers, exc_info)
         def metered_write(s):
             self.measure(s)
@@ -64,21 +65,21 @@
 
     def __init__(self, result, meter, description):
         self._result_close = getattr(result, 'close', None) or (lambda: None)
-        self._next = iter(result).next
+        self._next = iter(result).__next__
         self._meter = meter
         self._description = description
 
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         chunk = self._next()
         self._meter.measure(chunk)
         return chunk
 
     def close(self):
         self._result_close()
-        log.info("%s responded after %.3fs with %s bytes", self._description, self._meter.duration(), self._meter.size())
+        log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size())
 
 
 class RequestWrapper(object):
@@ -91,10 +92,11 @@
         meter = Meter(start_response)
         description = "Request from %s for %s" % (
             _get_ip_addr(environ),
-            safe_unicode(_get_access_path(environ)),
+            get_path_info(environ),
         )
+        log.info("%s received", description)
         try:
             result = self.application(environ, meter.start_response)
         finally:
-            log.info("%s responding after %.3fs", description, meter.duration())
+            log.info("%s responding %r after %.3fs", description, meter.status, meter.duration())
         return ResultIter(result, meter, description)
--- a/kallithea/lib/page.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/page.py	Sat May 02 21:20:43 2020 +0200
@@ -15,11 +15,11 @@
 Custom paging classes
 """
 import logging
-import math
-import re
 
-from webhelpers2.html import HTML, literal
-from webhelpers.paginate import Page as _Page
+import paginate
+import paginate_sqlalchemy
+import sqlalchemy.orm
+from webhelpers2.html import literal
 
 from kallithea.config.routing import url
 
@@ -27,229 +27,36 @@
 log = logging.getLogger(__name__)
 
 
-class Page(_Page):
-    """
-    Custom pager emitting Bootstrap paginators
-    """
-
-    def __init__(self, *args, **kwargs):
-        kwargs.setdefault('url', url.current)
-        _Page.__init__(self, *args, **kwargs)
-
-    def _get_pos(self, cur_page, max_page, items):
-        edge = (items / 2) + 1
-        if (cur_page <= edge):
-            radius = max(items / 2, items - cur_page)
-        elif (max_page - cur_page) < edge:
-            radius = (items - 1) - (max_page - cur_page)
-        else:
-            radius = items / 2
-
-        left = max(1, (cur_page - (radius)))
-        right = min(max_page, cur_page + (radius))
-        return left, cur_page, right
-
-    def _range(self, regexp_match):
-        """
-        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
-
-        Arguments:
-
-        regexp_match
-            A "re" (regular expressions) match object containing the
-            radius of linked pages around the current page in
-            regexp_match.group(1) as a string
-
-        This function is supposed to be called as a callable in
-        re.sub.
-
-        """
-        radius = int(regexp_match.group(1))
-
-        # Compute the first and last page number within the radius
-        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
-        # -> leftmost_page  = 5
-        # -> rightmost_page = 9
-        leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
-                                                            self.last_page,
-                                                            (radius * 2) + 1)
-        nav_items = []
-
-        # Create a link to the first page (unless we are on the first page
-        # or there would be no need to insert '..' spacers)
-        if self.page != self.first_page and self.first_page < leftmost_page:
-            nav_items.append(HTML.li(self._pagerlink(self.first_page, self.first_page)))
+class Page(paginate.Page):
 
-        # Insert dots if there are pages between the first page
-        # and the currently displayed page range
-        if leftmost_page - self.first_page > 1:
-            # Wrap in a SPAN tag if nolink_attr is set
-            text_ = '..'
-            if self.dotdot_attr:
-                text_ = HTML.span(c=text_, **self.dotdot_attr)
-            nav_items.append(HTML.li(text_))
-
-        for thispage in xrange(leftmost_page, rightmost_page + 1):
-            # Highlight the current page number and do not use a link
-            text_ = str(thispage)
-            if thispage == self.page:
-                # Wrap in a SPAN tag if nolink_attr is set
-                if self.curpage_attr:
-                    text_ = HTML.li(HTML.span(c=text_), **self.curpage_attr)
-                nav_items.append(text_)
-            # Otherwise create just a link to that page
-            else:
-                nav_items.append(HTML.li(self._pagerlink(thispage, text_)))
-
-        # Insert dots if there are pages between the displayed
-        # page numbers and the end of the page range
-        if self.last_page - rightmost_page > 1:
-            text_ = '..'
-            # Wrap in a SPAN tag if nolink_attr is set
-            if self.dotdot_attr:
-                text_ = HTML.span(c=text_, **self.dotdot_attr)
-            nav_items.append(HTML.li(text_))
-
-        # Create a link to the very last page (unless we are on the last
-        # page or there would be no need to insert '..' spacers)
-        if self.page != self.last_page and rightmost_page < self.last_page:
-            nav_items.append(HTML.li(self._pagerlink(self.last_page, self.last_page)))
-
-        #_page_link = url.current()
-        #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
-        #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
-        return self.separator.join(nav_items)
-
-    def pager(self, format='<ul class="pagination">$link_previous ~2~ $link_next</ul>', page_param='page', partial_param='partial',
-        show_if_single_page=False, separator=' ', onclick=None,
-        symbol_first='<<', symbol_last='>>',
-        symbol_previous='<', symbol_next='>',
-        link_attr=None,
-        curpage_attr=None,
-        dotdot_attr=None, **kwargs
-    ):
-        self.curpage_attr = curpage_attr or {'class': 'active'}
-        self.separator = separator
-        self.pager_kwargs = kwargs
-        self.page_param = page_param
-        self.partial_param = partial_param
-        self.onclick = onclick
-        self.link_attr = link_attr or {'class': 'pager_link', 'rel': 'prerender'}
-        self.dotdot_attr = dotdot_attr or {'class': 'pager_dotdot'}
+    def __init__(self, collection,
+                 page=1, items_per_page=20, item_count=None,
+                 **kwargs):
+        if isinstance(collection, sqlalchemy.orm.query.Query):
+            collection = paginate_sqlalchemy.SqlalchemyOrmWrapper(collection)
+        paginate.Page.__init__(self, collection, page=page, items_per_page=items_per_page, item_count=item_count,
+                               url_maker=lambda page: url.current(page=page, **kwargs))
 
-        # Don't show navigator if there is no more than one page
-        if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
-            return ''
-
-        from string import Template
-        # Replace ~...~ in token format by range of pages
-        result = re.sub(r'~(\d+)~', self._range, format)
-
-        # Interpolate '%' variables
-        result = Template(result).safe_substitute({
-            'first_page': self.first_page,
-            'last_page': self.last_page,
-            'page': self.page,
-            'page_count': self.page_count,
-            'items_per_page': self.items_per_page,
-            'first_item': self.first_item,
-            'last_item': self.last_item,
-            'item_count': self.item_count,
-            'link_first': self.page > self.first_page and
-                    self._pagerlink(self.first_page, symbol_first) or '',
-            'link_last': self.page < self.last_page and
-                    self._pagerlink(self.last_page, symbol_last) or '',
-            'link_previous': HTML.li(self.previous_page and
-                    self._pagerlink(self.previous_page, symbol_previous)
-                    or HTML.a(symbol_previous)),
-            'link_next': HTML.li(self.next_page and
-                    self._pagerlink(self.next_page, symbol_next)
-                    or HTML.a(symbol_next)),
-        })
-
-        return literal(result)
-
-
-class RepoPage(Page):
-
-    def __init__(self, collection, page=1, items_per_page=20,
-                 item_count=None, **kwargs):
-
-        """Create a "RepoPage" instance. special pager for paging
-        repository
-        """
-        # TODO: call baseclass __init__
-        self._url_generator = kwargs.pop('url', url.current)
-
-        # Safe the kwargs class-wide so they can be used in the pager() method
-        self.kwargs = kwargs
-
-        # Save a reference to the collection
-        self.original_collection = collection
-
-        self.collection = collection
+    def pager(self):
+        return literal(
+            paginate.Page.pager(self,
+                format='<ul class="pagination">$link_previous\n~4~$link_next</ul>',
+                link_attr={'class': 'pager_link'},
+                dotdot_attr={'class': 'pager_dotdot'},
+                separator='\n',
+                ))
 
-        # The self.page is the number of the current page.
-        # The first page has the number 1!
-        try:
-            self.page = int(page)  # make it int() if we get it as a string
-        except (ValueError, TypeError):
-            log.error("Invalid page value: %r", page)
-            self.page = 1
-
-        self.items_per_page = items_per_page
-
-        # Unless the user tells us how many items the collections has
-        # we calculate that ourselves.
-        if item_count is not None:
-            self.item_count = item_count
-        else:
-            self.item_count = len(self.collection)
-
-        # Compute the number of the first and last available page
-        if self.item_count > 0:
-            self.first_page = 1
-            self.page_count = int(math.ceil(float(self.item_count) /
-                                            self.items_per_page))
-            self.last_page = self.first_page + self.page_count - 1
-
-            # Make sure that the requested page number is the range of
-            # valid pages
-            if self.page > self.last_page:
-                self.page = self.last_page
-            elif self.page < self.first_page:
-                self.page = self.first_page
+    @staticmethod
+    def default_link_tag(item):
+        # based on the base class implementation, but wrapping results in <li>, and with different handling of current_page
+        text = item['value']
+        if item['type'] == 'current_page':  # we need active on the li and can thus not use curpage_attr
+            return '''<li class="active"><span>%s</span></li>''' % text
 
-            # Note: the number of items on this page can be less than
-            #       items_per_page if the last page is not full
-            self.first_item = max(0, (self.item_count) - (self.page *
-                                                          items_per_page))
-            self.last_item = ((self.item_count - 1) - items_per_page *
-                              (self.page - 1))
-
-            self.items = list(self.collection[self.first_item:self.last_item + 1])
-
-            # Links to previous and next page
-            if self.page > self.first_page:
-                self.previous_page = self.page - 1
-            else:
-                self.previous_page = None
-
-            if self.page < self.last_page:
-                self.next_page = self.page + 1
-            else:
-                self.next_page = None
-
-        # No items available
+        if not item['href'] or item['type'] == 'span':
+            if item['attrs']:
+                text = paginate.make_html_tag('span', **item['attrs']) + text + '</span>'
         else:
-            self.first_page = None
-            self.page_count = 0
-            self.last_page = None
-            self.first_item = None
-            self.last_item = None
-            self.previous_page = None
-            self.next_page = None
-            self.items = []
-
-        # This is a subclass of the 'list' type. Initialise the list now.
-        list.__init__(self, reversed(self.items))
+            target_url = item['href']
+            text = paginate.make_html_tag('a', text=text, href=target_url, **item['attrs'])
+        return '''<li>%s</li>''' % text
--- a/kallithea/lib/paster_commands/template.ini.mako	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/paster_commands/template.ini.mako	Sat May 02 21:20:43 2020 +0200
@@ -1,11 +1,11 @@
 ## -*- coding: utf-8 -*-
-<%text>################################################################################</%text>
-<%text>################################################################################</%text>
-# Kallithea - config file generated with kallithea-config                      #
-#                                                                              #
-# The %(here)s variable will be replaced with the parent directory of this file#
-<%text>################################################################################</%text>
-<%text>################################################################################</%text>
+<%text>###################################################################################</%text>
+<%text>###################################################################################</%text>
+<%text>## Kallithea config file generated with kallithea-config                         ##</%text>
+<%text>##                                                                               ##</%text>
+<%text>## The %(here)s variable will be replaced with the parent directory of this file ##</%text>
+<%text>###################################################################################</%text>
+<%text>###################################################################################</%text>
 
 [DEFAULT]
 
@@ -111,64 +111,33 @@
 %endif
 %else:
 <%text>## UWSGI ##</%text>
-<%text>## run with uwsgi --ini-paste-logged <inifile.ini></%text>
 [uwsgi]
-socket = /tmp/uwsgi.sock
-master = true
-http = ${host}:${port}
-
-<%text>## set as daemon and redirect all output to file</%text>
-#daemonize = ./uwsgi_kallithea.log
-
-<%text>## master process PID</%text>
-pidfile = ./uwsgi_kallithea.pid
+<%text>## Note: this section is parsed by the uWSGI .ini parser when run as:</%text>
+<%text>## uwsgi --venv /srv/kallithea/venv --ini-paste-logged my.ini</%text>
+<%text>## Note: in uWSGI 2.0.18 or older, pastescript needs to be installed to</%text>
+<%text>## get correct application logging. In later versions this is not necessary.</%text>
+<%text>## pip install pastescript</%text>
 
-<%text>## stats server with workers statistics, use uwsgitop</%text>
-<%text>## for monitoring, `uwsgitop 127.0.0.1:1717`</%text>
-stats = 127.0.0.1:1717
-memory-report = true
-
-<%text>## log 5XX errors</%text>
-log-5xx = true
-
-<%text>## Set the socket listen queue size.</%text>
-listen = 128
-
-<%text>## Gracefully Reload workers after the specified amount of managed requests</%text>
-<%text>## (avoid memory leaks).</%text>
-max-requests = 1000
+<%text>## HTTP Basics:</%text>
+http-socket = ${host}:${port}
+buffer-size = 65535                    ; Mercurial will use huge GET headers for discovery
 
-<%text>## enable large buffers</%text>
-buffer-size = 65535
-
-<%text>## socket and http timeouts ##</%text>
-http-timeout = 3600
-socket-timeout = 3600
-
-<%text>## Log requests slower than the specified number of milliseconds.</%text>
-log-slow = 10
-
-<%text>## Exit if no app can be loaded.</%text>
-need-app = true
-
-<%text>## Set lazy mode (load apps in workers instead of master).</%text>
-lazy = true
+<%text>## Scaling:</%text>
+master = true                          ; Use separate master and worker processes
+auto-procname = true                   ; Name worker processes accordingly
+lazy = true                            ; App *must* be loaded in workers - db connections can't be shared
+workers = 4                            ; On demand scaling up to this many worker processes
+cheaper = 1                            ; Initial and on demand scaling down to this many worker processes
+max-requests = 1000                    ; Graceful reload of worker processes to avoid leaks
 
-<%text>## scaling ##</%text>
-<%text>## set cheaper algorithm to use, if not set default will be used</%text>
-cheaper-algo = spare
-
-<%text>## minimum number of workers to keep at all times</%text>
-cheaper = 1
-
-<%text>## number of workers to spawn at startup</%text>
-cheaper-initial = 1
-
-<%text>## maximum number of workers that can be spawned</%text>
-workers = 4
-
-<%text>## how many workers should be spawned at a time</%text>
-cheaper-step = 1
+<%text>## Tweak defaults:</%text>
+strict = true                          ; Fail on unknown config directives
+enable-threads = true                  ; Enable Python threads (not threaded workers)
+vacuum = true                          ; Delete sockets during shutdown
+single-interpreter = true
+die-on-term = true                     ; Shutdown when receiving SIGTERM (default is respawn)
+need-app = true                        ; Exit early if no app can be loaded.
+reload-on-exception = true             ; Don't assume that the application worker can process more requests after a severe error
 
 %endif
 <%text>## middleware for hosting the WSGI application under a URL prefix</%text>
@@ -220,7 +189,7 @@
 <%text>## used, which is correct in many cases but for example not when using uwsgi.</%text>
 <%text>## If you change this setting, you should reinstall the Git hooks via</%text>
 <%text>## Admin > Settings > Remap and Rescan.</%text>
-# git_hook_interpreter = /srv/kallithea/venv/bin/python2
+#git_hook_interpreter = /srv/kallithea/venv/bin/python3
 %if git_hook_interpreter:
 git_hook_interpreter = ${git_hook_interpreter}
 %endif
@@ -295,7 +264,7 @@
 <%text>## issue_pat, issue_server_link and issue_sub can have suffixes to specify</%text>
 <%text>## multiple patterns, to other issues server, wiki or others</%text>
 <%text>## below an example how to create a wiki pattern</%text>
-# wiki-some-id -> https://wiki.example.com/some-id
+<%text>## wiki-some-id -> https://wiki.example.com/some-id</%text>
 
 #issue_pat_wiki = wiki-(\S+)
 #issue_server_link_wiki = https://wiki.example.com/\1
@@ -313,12 +282,12 @@
 allow_custom_hooks_settings = True
 
 <%text>## extra extensions for indexing, space separated and without the leading '.'.</%text>
-# index.extensions =
+#index.extensions =
 #    gemfile
 #    lock
 
 <%text>## extra filenames for indexing, space separated</%text>
-# index.filenames =
+#index.filenames =
 #    .dockerignore
 #    .editorconfig
 #    INSTALL
@@ -356,25 +325,23 @@
 <%text>###        CELERY CONFIG        ####</%text>
 <%text>####################################</%text>
 
+<%text>## Note: Celery doesn't support Windows.</%text>
 use_celery = false
 
-<%text>## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq:</%text>
-broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost
+<%text>## Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.</%text>
 
-celery.imports = kallithea.lib.celerylib.tasks
-celery.accept.content = pickle
-celery.result.backend = amqp
-celery.result.dburi = amqp://
-celery.result.serializer = json
+<%text>## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':</%text>
+celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
 
-#celery.send.task.error.emails = true
+celery.result.backend = db+sqlite:///celery-results.db
+
 #celery.amqp.task.result.expires = 18000
 
-celeryd.concurrency = 2
-celeryd.max.tasks.per.child = 1
+celery.worker_concurrency = 2
+celery.worker_max_tasks_per_child = 1
 
 <%text>## If true, tasks will never be sent to the queue, but executed locally instead.</%text>
-celery.always.eager = false
+celery.task_always_eager = false
 
 <%text>####################################</%text>
 <%text>###         BEAKER CACHE        ####</%text>
@@ -383,19 +350,15 @@
 beaker.cache.data_dir = %(here)s/data/cache/data
 beaker.cache.lock_dir = %(here)s/data/cache/lock
 
-beaker.cache.regions = short_term,long_term,sql_cache_short
-
-beaker.cache.short_term.type = memory
-beaker.cache.short_term.expire = 60
-beaker.cache.short_term.key_length = 256
+beaker.cache.regions = long_term,long_term_file
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 36000
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
+beaker.cache.long_term_file.type = file
+beaker.cache.long_term_file.expire = 604800
+beaker.cache.long_term_file.key_length = 256
 
 <%text>####################################</%text>
 <%text>###       BEAKER SESSION        ####</%text>
@@ -429,12 +392,24 @@
 #session.sa.url = postgresql://postgres:qwe@localhost/kallithea
 #session.table_name = db_session
 
-<%text>############################</%text>
-<%text>## ERROR HANDLING SYSTEMS ##</%text>
-<%text>############################</%text>
+<%text>####################################</%text>
+<%text>###       ERROR HANDLING        ####</%text>
+<%text>####################################</%text>
+
+<%text>## Show a nice error page for application HTTP errors and exceptions (default true)</%text>
+#errorpage.enabled = true
 
-# Propagate email settings to ErrorReporter of TurboGears2
-# You do not normally need to change these lines
+<%text>## Enable Backlash client-side interactive debugger (default false)</%text>
+<%text>## WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*</%text>
+<%text>## This debug mode will allow all visitors to execute malicious code.</%text>
+#debug = false
+
+<%text>## Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)</%text>
+#trace_errors.enable = true
+<%text>## Errors will be reported by mail if trace_errors.error_email is set.</%text>
+
+<%text>## Propagate email settings to ErrorReporter of TurboGears2</%text>
+<%text>## You do not normally need to change these lines</%text>
 get trace_errors.smtp_server = smtp_server
 get trace_errors.smtp_port = smtp_port
 get trace_errors.from_address = error_email_from
@@ -443,63 +418,7 @@
 get trace_errors.smtp_password = smtp_password
 get trace_errors.smtp_use_tls = smtp_use_tls
 
-%if error_aggregation_service == 'appenlight':
-<%text>####################</%text>
-<%text>### [appenlight] ###</%text>
-<%text>####################</%text>
-
-<%text>## AppEnlight is tailored to work with Kallithea, see</%text>
-<%text>## http://appenlight.com for details how to obtain an account</%text>
-<%text>## you must install python package `appenlight_client` to make it work</%text>
-
-<%text>## appenlight enabled</%text>
-appenlight = false
-
-appenlight.server_url = https://api.appenlight.com
-appenlight.api_key = YOUR_API_KEY
-
-<%text>## TWEAK AMOUNT OF INFO SENT HERE</%text>
-
-<%text>## enables 404 error logging (default False)</%text>
-appenlight.report_404 = false
-
-<%text>## time in seconds after request is considered being slow (default 1)</%text>
-appenlight.slow_request_time = 1
-
-<%text>## record slow requests in application</%text>
-<%text>## (needs to be enabled for slow datastore recording and time tracking)</%text>
-appenlight.slow_requests = true
-
-<%text>## enable hooking to application loggers</%text>
-#appenlight.logging = true
-
-<%text>## minimum log level for log capture</%text>
-#appenlight.logging.level = WARNING
-
-<%text>## send logs only from erroneous/slow requests</%text>
-<%text>## (saves API quota for intensive logging)</%text>
-appenlight.logging_on_error = false
-
-<%text>## list of additional keywords that should be grabbed from environ object</%text>
-<%text>## can be string with comma separated list of words in lowercase</%text>
-<%text>## (by default client will always send following info:</%text>
-<%text>## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that</%text>
-<%text>## start with HTTP* this list be extended with additional keywords here</%text>
-appenlight.environ_keys_whitelist =
-
-<%text>## list of keywords that should be blanked from request object</%text>
-<%text>## can be string with comma separated list of words in lowercase</%text>
-<%text>## (by default client will always blank keys that contain following words</%text>
-<%text>## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'</%text>
-<%text>## this list be extended with additional keywords set here</%text>
-appenlight.request_keys_blacklist =
-
-<%text>## list of namespaces that should be ignores when gathering log entries</%text>
-<%text>## can be string with comma separated list of namespaces</%text>
-<%text>## (by default the client ignores own entries: appenlight_client.client)</%text>
-appenlight.log_namespace_blacklist =
-
-%elif error_aggregation_service == 'sentry':
+%if error_aggregation_service == 'sentry':
 <%text>################</%text>
 <%text>### [sentry] ###</%text>
 <%text>################</%text>
@@ -519,12 +438,6 @@
 sentry.exclude_paths =
 
 %endif
-<%text>################################################################################</%text>
-<%text>## WARNING: *DEBUG MODE MUST BE OFF IN A PRODUCTION ENVIRONMENT*              ##</%text>
-<%text>## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##</%text>
-<%text>## execute malicious code after an exception is raised.                       ##</%text>
-<%text>################################################################################</%text>
-debug = false
 
 <%text>##################################</%text>
 <%text>###       LOGVIEW CONFIG       ###</%text>
@@ -539,19 +452,19 @@
 <%text>#########################################################</%text>
 
 %if database_engine == 'sqlite':
-# SQLITE [default]
+<%text>## SQLITE [default]</%text>
 sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
 
 %elif database_engine == 'postgres':
-# POSTGRESQL
+<%text>## POSTGRESQL</%text>
 sqlalchemy.url = postgresql://user:pass@localhost/kallithea
 
 %elif database_engine == 'mysql':
-# MySQL
+<%text>## MySQL</%text>
 sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8
 
 %endif
-# see sqlalchemy docs for others
+<%text>## see sqlalchemy docs for other backends</%text>
 
 sqlalchemy.pool_recycle = 3600
 
@@ -582,8 +495,8 @@
 [logger_root]
 level = NOTSET
 handlers = console
-# For coloring based on log level:
-# handlers = console_color
+<%text>## For coloring based on log level:</%text>
+#handlers = console_color
 
 [logger_routes]
 level = WARN
@@ -620,10 +533,10 @@
 level = WARN
 handlers =
 qualname = sqlalchemy.engine
-# For coloring based on log level and pretty printing of SQL:
-# level = INFO
-# handlers = console_color_sql
-# propagate = 0
+<%text>## For coloring based on log level and pretty printing of SQL:</%text>
+#level = INFO
+#handlers = console_color_sql
+#propagate = 0
 
 [logger_whoosh_indexer]
 level = WARN
@@ -650,13 +563,13 @@
 formatter = generic
 
 [handler_console_color]
-# ANSI color coding based on log level
+<%text>## ANSI color coding based on log level</%text>
 class = StreamHandler
 args = (sys.stderr,)
 formatter = color_formatter
 
 [handler_console_color_sql]
-# ANSI color coding and pretty printing of SQL statements
+<%text>## ANSI color coding and pretty printing of SQL statements</%text>
 class = StreamHandler
 args = (sys.stderr,)
 formatter = color_formatter_sql
@@ -687,16 +600,16 @@
 <%text>## SSH LOGGING ##</%text>
 <%text>#################</%text>
 
-# The default loggers use 'handler_console' that uses StreamHandler with
-# destination 'sys.stderr'. In the context of the SSH server process, these log
-# messages would be sent to the client, which is normally not what you want.
-# By default, when running ssh-serve, just use NullHandler and disable logging
-# completely. For other logging options, see:
-# https://docs.python.org/2/library/logging.handlers.html
+<%text>## The default loggers use 'handler_console' that uses StreamHandler with</%text>
+<%text>## destination 'sys.stderr'. In the context of the SSH server process, these log</%text>
+<%text>## messages would be sent to the client, which is normally not what you want.</%text>
+<%text>## By default, when running ssh-serve, just use NullHandler and disable logging</%text>
+<%text>## completely. For other logging options, see:</%text>
+<%text>## https://docs.python.org/3/library/logging.handlers.html</%text>
 
 [ssh_serve:logger_root]
 level = CRITICAL
 handlers = null
 
-# Note: If logging is configured with other handlers, they might need similar
-# muting for ssh-serve too.
+<%text>## Note: If logging is configured with other handlers, they might need similar</%text>
+<%text>## muting for ssh-serve too.</%text>
--- a/kallithea/lib/pidlock.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/pidlock.py	Sat May 02 21:20:43 2020 +0200
@@ -12,8 +12,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from __future__ import print_function
-
 import errno
 import os
 from multiprocessing.util import Finalize
@@ -137,6 +135,6 @@
         dir_, file_ = os.path.split(pidfile)
         if not os.path.isdir(dir_):
             os.makedirs(dir_)
-        with open(self.pidfile, 'wb') as f:
+        with open(self.pidfile, 'w') as f:
             f.write(lockname)
         self.held = True
--- a/kallithea/lib/pygmentsutils.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/pygmentsutils.py	Sat May 02 21:20:43 2020 +0200
@@ -26,7 +26,6 @@
 """
 
 from collections import defaultdict
-from itertools import ifilter
 
 from pygments import lexers
 
@@ -59,15 +58,11 @@
     """
     Get list of known indexable filenames from pygment lexer internals
     """
-
     filenames = []
-
-    def likely_filename(s):
-        return s.find('*') == -1 and s.find('[') == -1
-
     for lx, t in sorted(lexers.LEXERS.items()):
-        for f in ifilter(likely_filename, t[-2]):
-            filenames.append(f)
+        for f in t[-2]:
+            if '*' not in f and '[' not in f:
+                filenames.append(f)
 
     return filenames
 
--- a/kallithea/lib/rcmail/message.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/rcmail/message.py	Sat May 02 21:20:43 2020 +0200
@@ -2,35 +2,6 @@
 from kallithea.lib.rcmail.response import MailResponse
 
 
-class Attachment(object):
-    """
-    Encapsulates file attachment information.
-
-    :param filename: filename of attachment
-    :param content_type: file mimetype
-    :param data: the raw file data, either as string or file obj
-    :param disposition: content-disposition (if any)
-    """
-
-    def __init__(self,
-                 filename=None,
-                 content_type=None,
-                 data=None,
-                 disposition=None):
-
-        self.filename = filename
-        self.content_type = content_type
-        self.disposition = disposition or 'attachment'
-        self._data = data
-
-    @property
-    def data(self):
-        if isinstance(self._data, basestring):
-            return self._data
-        self._data = self._data.read()
-        return self._data
-
-
 class Message(object):
     """
     Encapsulates an email message.
--- a/kallithea/lib/rcmail/response.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/rcmail/response.py	Sat May 02 21:20:43 2020 +0200
@@ -44,7 +44,9 @@
 
 ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc']
 DEFAULT_ENCODING = "utf-8"
-VALUE_IS_EMAIL_ADDRESS = lambda v: '@' in v
+
+def VALUE_IS_EMAIL_ADDRESS(v):
+    return '@' in v
 
 
 def normalize_header(header):
@@ -87,7 +89,7 @@
     def __delitem__(self, key):
         del self.headers[normalize_header(key)]
 
-    def __nonzero__(self):
+    def __bool__(self):
         return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0
 
     def keys(self):
@@ -339,20 +341,20 @@
 
     try:
         out = MIMEPart(ctype, **params)
-    except TypeError as exc:  # pragma: no cover
+    except TypeError as e:  # pragma: no cover
         raise EncodingError("Content-Type malformed, not allowed: %r; "
-                            "%r (Python ERROR: %s" %
-                            (ctype, params, exc.message))
+                            "%r (Python ERROR: %s)" %
+                            (ctype, params, e.args[0]))
 
     for k in mail.keys():
         if k in ADDRESS_HEADERS_WHITELIST:
-            out[k.encode('ascii')] = header_to_mime_encoding(
+            out[k] = header_to_mime_encoding(
                                          mail[k],
                                          not_email=False,
                                          separator=separator
                                      )
         else:
-            out[k.encode('ascii')] = header_to_mime_encoding(
+            out[k] = header_to_mime_encoding(
                                          mail[k],
                                          not_email=True
                                     )
@@ -392,7 +394,7 @@
         if mail.body is None:
             return  # only None, '' is still ok
 
-        ctype, ctype_params = mail.content_encoding['Content-Type']
+        ctype, _ctype_params = mail.content_encoding['Content-Type']
         cdisp, cdisp_params = mail.content_encoding['Content-Disposition']
 
         assert ctype, ("Extract payload requires that mail.content_encoding "
@@ -422,7 +424,7 @@
         return ""
 
     encoder = Charset(DEFAULT_ENCODING)
-    if type(value) == list:
+    if isinstance(value, list):
         return separator.join(properly_encode_header(
             v, encoder, not_email) for v in value)
     else:
@@ -443,12 +445,12 @@
     check different, then change this.
     """
     try:
-        return value.encode("ascii")
-    except UnicodeEncodeError:
+        value.encode("ascii")
+        return value
+    except UnicodeError:
         if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
             # this could have an email address, make sure we don't screw it up
             name, address = parseaddr(value)
-            return '"%s" <%s>' % (
-                encoder.header_encode(name.encode("utf-8")), address)
+            return '"%s" <%s>' % (encoder.header_encode(name), address)
 
-        return encoder.header_encode(value.encode("utf-8"))
+        return encoder.header_encode(value)
--- a/kallithea/lib/rcmail/smtp_mailer.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/rcmail/smtp_mailer.py	Sat May 02 21:20:43 2020 +0200
@@ -64,7 +64,7 @@
     def send(self, recipients=None, subject='', body='', html='',
              attachment_files=None, headers=None):
         recipients = recipients or []
-        if isinstance(recipients, basestring):
+        if isinstance(recipients, str):
             recipients = [recipients]
         if headers is None:
             headers = {}
--- a/kallithea/lib/recaptcha.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/recaptcha.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import json
-import urllib
-import urllib2
+import urllib.parse
+import urllib.request
 
 
 class RecaptchaResponse(object):
@@ -26,17 +26,17 @@
         return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
 
     def encode_if_necessary(s):
-        if isinstance(s, unicode):
+        if isinstance(s, str):
             return s.encode('utf-8')
         return s
 
-    params = urllib.urlencode({
+    params = urllib.parse.urlencode({
         'secret': encode_if_necessary(private_key),
         'remoteip': encode_if_necessary(remoteip),
         'response': encode_if_necessary(g_recaptcha_response),
-    })
+    }).encode('ascii')
 
-    req = urllib2.Request(
+    req = urllib.request.Request(
         url="https://www.google.com/recaptcha/api/siteverify",
         data=params,
         headers={
@@ -45,7 +45,7 @@
         }
     )
 
-    httpresp = urllib2.urlopen(req)
+    httpresp = urllib.request.urlopen(req)
     return_values = json.loads(httpresp.read())
     httpresp.close()
 
--- a/kallithea/lib/ssh.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/ssh.py	Sat May 02 21:20:43 2020 +0200
@@ -21,12 +21,14 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import binascii
+import base64
 import logging
 import re
 
 from tg.i18n import ugettext as _
 
+from kallithea.lib.utils2 import ascii_bytes, ascii_str
+
 
 log = logging.getLogger(__name__)
 
@@ -42,32 +44,32 @@
     >>> parse_pub_key('')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: SSH key is missing
+    kallithea.lib.ssh.SshKeyParseError: SSH key is missing
     >>> parse_pub_key('''AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
+    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
     >>> parse_pub_key('''abc AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
+    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
     >>> parse_pub_key('''ssh-rsa  AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
+    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
     >>> parse_pub_key('''ssh-rsa  AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==''')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
+    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
     >>> parse_pub_key('''ssh-rsa  AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
+    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
     >>> parse_pub_key(''' ssh-rsa  AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment
     ... ''')
-    ('ssh-rsa', '\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
+    ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
     >>> parse_pub_key('''ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP1NA2kBQIKe74afUXmIWD9ByDYQJqUwW44Y4gJOBRuo''')
-    ('ssh-ed25519', '\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
+    ('ssh-ed25519', b'\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
     """
     if not ssh_key:
         raise SshKeyParseError(_("SSH key is missing"))
@@ -84,14 +86,14 @@
         raise SshKeyParseError(_("Incorrect SSH key - unexpected characters in base64 part %r") % keyvalue)
 
     try:
-        decoded = keyvalue.decode('base64')
-    except binascii.Error:
+        key_bytes = base64.b64decode(keyvalue)
+    except base64.binascii.Error:
         raise SshKeyParseError(_("Incorrect SSH key - failed to decode base64 part %r") % keyvalue)
 
-    if not decoded.startswith('\x00\x00\x00' + chr(len(keytype)) + str(keytype) + '\x00'):
-        raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (str(keytype), str(decoded[4:].split('\0', 1)[0])))
+    if not key_bytes.startswith(b'\x00\x00\x00%c%s\x00' % (len(keytype), ascii_bytes(keytype))):
+        raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (keytype, ascii_str(key_bytes[4:].split(b'\0', 1)[0])))
 
-    return keytype, decoded, comment
+    return keytype, key_bytes, comment
 
 
 SSH_OPTIONS = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
@@ -121,13 +123,14 @@
     'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==\\n'
     """
     try:
-        keytype, decoded, comment = parse_pub_key(key.public_key)
+        keytype, key_bytes, comment = parse_pub_key(key.public_key)
     except SshKeyParseError:
         return '# Invalid Kallithea SSH key: %s %s\n' % (key.user.user_id, key.user_ssh_key_id)
-    mimekey = decoded.encode('base64').replace('\n', '')
-    if not _safe_check(mimekey):
+    base64_key = ascii_str(base64.b64encode(key_bytes))
+    assert '\n' not in base64_key
+    if not _safe_check(base64_key):
         return '# Invalid Kallithea SSH key - bad base64 encoding: %s %s\n' % (key.user.user_id, key.user_ssh_key_id)
     return '%s,command="%s ssh-serve -c %s %s %s" %s %s\n' % (
         SSH_OPTIONS, kallithea_cli_path, config_file,
         key.user.user_id, key.user_ssh_key_id,
-        keytype, mimekey)
+        keytype, base64_key)
--- a/kallithea/lib/timerproxy.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/timerproxy.py	Sat May 02 21:20:43 2020 +0200
@@ -20,7 +20,7 @@
 
 log = logging.getLogger('timerproxy')
 
-BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
 
 
 def color_sql(sql):
--- a/kallithea/lib/utils.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/utils.py	Sat May 02 21:20:43 2020 +0200
@@ -31,21 +31,24 @@
 import re
 import sys
 import traceback
+import urllib.error
 from distutils.version import StrictVersion
 
-import beaker
-from beaker.cache import _cache_decorate
-from tg.i18n import ugettext as _
+import mercurial.config
+import mercurial.error
+import mercurial.ui
 
-from kallithea.lib.exceptions import HgsubversionImportError
-from kallithea.lib.utils2 import get_current_authuser, safe_str, safe_unicode
-from kallithea.lib.vcs.exceptions import VCSError
+import kallithea.config.conf
+from kallithea.lib.exceptions import InvalidCloneUriException
+from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str
+from kallithea.lib.vcs.backends.git.repository import GitRepository
+from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
+from kallithea.lib.vcs.conf import settings
+from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
 from kallithea.lib.vcs.utils.fakemod import create_module
 from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.utils.hgcompat import config, ui
-from kallithea.model import meta
+from kallithea.model import db, meta
 from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog
-from kallithea.model.repo_group import RepoGroupModel
 
 
 log = logging.getLogger(__name__)
@@ -102,7 +105,6 @@
         rest = '/' + rest_
     repo_id = _get_permanent_id(first)
     if repo_id is not None:
-        from kallithea.model.db import Repository
         repo = Repository.get(repo_id)
         if repo is not None:
             return repo.repo_name + rest
@@ -130,7 +132,7 @@
 
     if getattr(user, 'user_id', None):
         user_obj = User.get(user.user_id)
-    elif isinstance(user, basestring):
+    elif isinstance(user, str):
         user_obj = User.get_by_username(user)
     else:
         raise Exception('You have to provide a user object or a username')
@@ -138,17 +140,17 @@
     if getattr(repo, 'repo_id', None):
         repo_obj = Repository.get(repo.repo_id)
         repo_name = repo_obj.repo_name
-    elif isinstance(repo, basestring):
+    elif isinstance(repo, str):
         repo_name = repo.lstrip('/')
         repo_obj = Repository.get_by_repo_name(repo_name)
     else:
         repo_obj = None
-        repo_name = u''
+        repo_name = ''
 
     user_log = UserLog()
     user_log.user_id = user_obj.user_id
     user_log.username = user_obj.username
-    user_log.action = safe_unicode(action)
+    user_log.action = action
 
     user_log.repository = repo_obj
     user_log.repository_name = repo_name
@@ -158,7 +160,7 @@
     meta.Session().add(user_log)
 
     log.info('Logging action:%s on %s by user:%s ip:%s',
-             action, safe_unicode(repo), user_obj, ipaddr)
+             action, repo, user_obj, ipaddr)
     if commit:
         meta.Session().commit()
 
@@ -172,7 +174,7 @@
     """
 
     # remove ending slash for better results
-    path = safe_str(path.rstrip(os.sep))
+    path = path.rstrip(os.sep)
     log.debug('now scanning in %s', path)
 
     def isdir(*n):
@@ -223,37 +225,43 @@
 
 
 def is_valid_repo_uri(repo_type, url, ui):
-    """Check if the url seems like a valid remote repo location - raise an Exception if any problems"""
+    """Check if the url seems like a valid remote repo location
+    Raise InvalidCloneUriException if any problems"""
     if repo_type == 'hg':
-        from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
         if url.startswith('http') or url.startswith('ssh'):
             # initially check if it's at least the proper URL
             # or does it pass basic auth
-            MercurialRepository._check_url(url, ui)
+            try:
+                MercurialRepository._check_url(url, ui)
+            except urllib.error.URLError as e:
+                raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
+            except mercurial.error.RepoError as e:
+                raise InvalidCloneUriException('Mercurial %s: %s' % (type(e).__name__, safe_str(bytes(e))))
         elif url.startswith('svn+http'):
             try:
                 from hgsubversion.svnrepo import svnremoterepo
             except ImportError:
-                raise HgsubversionImportError(_('Unable to activate hgsubversion support. '
-                                                'The "hgsubversion" library is missing'))
+                raise InvalidCloneUriException('URI type %s not supported - hgsubversion is not available' % (url,))
             svnremoterepo(ui, url).svn.uuid
         elif url.startswith('git+http'):
-            raise NotImplementedError()
+            raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         else:
-            raise Exception('URI %s not allowed' % (url,))
+            raise InvalidCloneUriException('URI %s not allowed' % (url,))
 
     elif repo_type == 'git':
-        from kallithea.lib.vcs.backends.git.repository import GitRepository
         if url.startswith('http') or url.startswith('git'):
             # initially check if it's at least the proper URL
             # or does it pass basic auth
-            GitRepository._check_url(url)
+            try:
+                GitRepository._check_url(url)
+            except urllib.error.URLError as e:
+                raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
         elif url.startswith('svn+http'):
-            raise NotImplementedError()
+            raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         elif url.startswith('hg+http'):
-            raise NotImplementedError()
+            raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         else:
-            raise Exception('URI %s not allowed' % (url))
+            raise InvalidCloneUriException('URI %s not allowed' % (url))
 
 
 def is_valid_repo(repo_name, base_path, scm=None):
@@ -269,7 +277,7 @@
     :return True: if given path is a valid repository
     """
     # TODO: paranoid security checks?
-    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
+    full_path = os.path.join(base_path, repo_name)
 
     try:
         scm_ = get_scm(full_path)
@@ -287,7 +295,7 @@
     :param repo_name:
     :param base_path:
     """
-    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
+    full_path = os.path.join(base_path, repo_group_name)
 
     # check if it's not a repo
     if is_valid_repo(repo_group_name, base_path):
@@ -309,65 +317,41 @@
     return False
 
 
-# propagated from mercurial documentation
-ui_sections = ['alias', 'auth',
-                'decode/encode', 'defaults',
-                'diff', 'email',
-                'extensions', 'format',
-                'merge-patterns', 'merge-tools',
-                'hooks', 'http_proxy',
-                'smtp', 'patch',
-                'paths', 'profiling',
-                'server', 'trusted',
-                'ui', 'web', ]
-
-
-def make_ui(repo_path=None, clear_session=True):
+def make_ui(repo_path=None):
     """
     Create an Mercurial 'ui' object based on database Ui settings, possibly
     augmenting with content from a hgrc file.
     """
-    baseui = ui.ui()
+    baseui = mercurial.ui.ui()
 
     # clean the baseui object
-    baseui._ocfg = config.config()
-    baseui._ucfg = config.config()
-    baseui._tcfg = config.config()
+    baseui._ocfg = mercurial.config.config()
+    baseui._ucfg = mercurial.config.config()
+    baseui._tcfg = mercurial.config.config()
 
     sa = meta.Session()
-    for ui_ in sa.query(Ui).all():
+    for ui_ in sa.query(Ui).order_by(Ui.ui_section, Ui.ui_key):
         if ui_.ui_active:
-            ui_val = '' if ui_.ui_value is None else safe_str(ui_.ui_value)
             log.debug('config from db: [%s] %s=%r', ui_.ui_section,
-                      ui_.ui_key, ui_val)
-            baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
-                             ui_val)
-    if clear_session:
-        meta.Session.remove()
+                      ui_.ui_key, ui_.ui_value)
+            baseui.setconfig(ascii_bytes(ui_.ui_section), ascii_bytes(ui_.ui_key),
+                             b'' if ui_.ui_value is None else safe_bytes(ui_.ui_value))
 
     # force set push_ssl requirement to False, Kallithea handles that
-    baseui.setconfig('web', 'push_ssl', False)
-    baseui.setconfig('web', 'allow_push', '*')
+    baseui.setconfig(b'web', b'push_ssl', False)
+    baseui.setconfig(b'web', b'allow_push', b'*')
     # prevent interactive questions for ssh password / passphrase
-    ssh = baseui.config('ui', 'ssh', default='ssh')
-    baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
+    ssh = baseui.config(b'ui', b'ssh', default=b'ssh')
+    baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
     # push / pull hooks
-    baseui.setconfig('hooks', 'changegroup.kallithea_log_push_action', 'python:kallithea.lib.hooks.log_push_action')
-    baseui.setconfig('hooks', 'outgoing.kallithea_log_pull_action', 'python:kallithea.lib.hooks.log_pull_action')
+    baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action', b'python:kallithea.lib.hooks.log_push_action')
+    baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action', b'python:kallithea.lib.hooks.log_pull_action')
 
     if repo_path is not None:
-        hgrc_path = os.path.join(repo_path, '.hg', 'hgrc')
-        if os.path.isfile(hgrc_path):
-            log.debug('reading hgrc from %s', hgrc_path)
-            cfg = config.config()
-            cfg.read(hgrc_path)
-            for section in ui_sections:
-                for k, v in cfg.items(section):
-                    log.debug('config from file: [%s] %s=%s', section, k, v)
-                    baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
-        else:
-            log.debug('hgrc file is not present at %s, skipping...', hgrc_path)
+        # Note: MercurialRepository / mercurial.localrepo.instance will do this too, so it will always be possible to override db settings or what is hardcoded above
+        baseui.readconfig(repo_path)
 
+    assert baseui.plain()  # set by hgcompat.monkey_do (invoked from import of vcs.backends.hg) to minimize potential impact of loading config files
     return baseui
 
 
@@ -377,12 +361,10 @@
 
     :param config:
     """
-    try:
-        hgsettings = Setting.get_app_settings()
-        for k, v in hgsettings.items():
-            config[k] = v
-    finally:
-        meta.Session.remove()
+    hgsettings = Setting.get_app_settings()
+    for k, v in hgsettings.items():
+        config[k] = v
+    config['base_path'] = Ui.get_repos_location()
 
 
 def set_vcs_config(config):
@@ -391,16 +373,14 @@
 
     :param config: kallithea.CONFIG
     """
-    from kallithea.lib.vcs import conf
-    from kallithea.lib.utils2 import aslist
-    conf.settings.BACKENDS = {
+    settings.BACKENDS = {
         'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
         'git': 'kallithea.lib.vcs.backends.git.GitRepository',
     }
 
-    conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
-    conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
-    conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
+    settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
+    settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
+    settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
                                                         'utf-8'), sep=',')
 
 
@@ -410,13 +390,11 @@
 
     :param config: kallithea.CONFIG
     """
-    from kallithea.config import conf
-
     log.debug('adding extra into INDEX_EXTENSIONS')
-    conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
+    kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
 
     log.debug('adding extra into INDEX_FILENAMES')
-    conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
+    kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
 
 
 def map_groups(path):
@@ -427,8 +405,9 @@
 
     :param paths: full path to repository
     """
+    from kallithea.model.repo_group import RepoGroupModel
     sa = meta.Session()
-    groups = path.split(Repository.url_sep())
+    groups = path.split(db.URL_SEP)
     parent = None
     group = None
 
@@ -437,7 +416,7 @@
     rgm = RepoGroupModel()
     owner = User.get_first_admin()
     for lvl, group_name in enumerate(groups):
-        group_name = u'/'.join(groups[:lvl] + [group_name])
+        group_name = '/'.join(groups[:lvl] + [group_name])
         group = RepoGroup.get_by_group_name(group_name)
         desc = '%s group' % group_name
 
@@ -459,14 +438,14 @@
     return group
 
 
-def repo2db_mapper(initial_repo_list, remove_obsolete=False,
+def repo2db_mapper(initial_repo_dict, remove_obsolete=False,
                    install_git_hooks=False, user=None, overwrite_git_hooks=False):
     """
-    maps all repos given in initial_repo_list, non existing repositories
+    maps all repos given in initial_repo_dict, non existing repositories
     are created, if remove_obsolete is True it also check for db entries
-    that are not in initial_repo_list and removes them.
+    that are not in initial_repo_dict and removes them.
 
-    :param initial_repo_list: list of repositories found by scanning methods
+    :param initial_repo_dict: mapping with repositories found by scanning methods
     :param remove_obsolete: check for obsolete entries in database
     :param install_git_hooks: if this is True, also check and install git hook
         for a repo if missing
@@ -487,10 +466,9 @@
     enable_downloads = defs.get('repo_enable_downloads')
     private = defs.get('repo_private')
 
-    for name, repo in initial_repo_list.items():
+    for name, repo in initial_repo_dict.items():
         group = map_groups(name)
-        unicode_name = safe_unicode(name)
-        db_repo = repo_model.get_by_repo_name(unicode_name)
+        db_repo = repo_model.get_by_repo_name(name)
         # found repo that is on filesystem not in Kallithea database
         if not db_repo:
             log.info('repository %s not found, creating now', name)
@@ -526,9 +504,8 @@
 
     removed = []
     # remove from database those repositories that are not in the filesystem
-    unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list)
     for repo in sa.query(Repository).all():
-        if repo.repo_name not in unicode_initial_repo_list:
+        if repo.repo_name not in initial_repo_dict:
             if remove_obsolete:
                 log.debug("Removing non-existing repository found in db `%s`",
                           repo.repo_name)
@@ -544,9 +521,6 @@
 
 
 def load_rcextensions(root_path):
-    import kallithea
-    from kallithea.config import conf
-
     path = os.path.join(root_path, 'rcextensions', '__init__.py')
     if os.path.isfile(path):
         rcext = create_module('rc', path)
@@ -554,17 +528,17 @@
         log.debug('Found rcextensions now loading %s...', rcext)
 
         # Additional mappings that are not present in the pygments lexers
-        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
+        kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
 
         # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
 
         if getattr(EXT, 'INDEX_EXTENSIONS', []):
             log.debug('settings custom INDEX_EXTENSIONS')
-            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
+            kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
 
         # ADDITIONAL MAPPINGS
         log.debug('adding extra into INDEX_EXTENSIONS')
-        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
+        kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
 
         # auto check if the module is not missing any data, set to default if is
         # this will help autoupdate new feature of rcext module
@@ -585,28 +559,33 @@
     Checks what version of git is installed on the system, and raise a system exit
     if it's too old for Kallithea to work properly.
     """
-    from kallithea import BACKENDS
-    from kallithea.lib.vcs.backends.git.repository import GitRepository
-    from kallithea.lib.vcs.conf import settings
-
-    if 'git' not in BACKENDS:
+    if 'git' not in kallithea.BACKENDS:
         return None
 
     if not settings.GIT_EXECUTABLE_PATH:
         log.warning('No git executable configured - check "git_path" in the ini file.')
         return None
 
-    stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
-                                                    _safe=True)
+    try:
+        stdout, stderr = GitRepository._run_git_command(['--version'])
+    except RepositoryError as e:
+        # message will already have been logged as error
+        log.warning('No working git executable found - check "git_path" in the ini file.')
+        return None
 
     if stderr:
-        log.warning('Error/stderr from "%s --version": %r', settings.GIT_EXECUTABLE_PATH, stderr)
+        log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr))
 
-    m = re.search(r"\d+.\d+.\d+", stdout)
+    if not stdout:
+        log.warning('No working git executable found - check "git_path" in the ini file.')
+        return None
+
+    output = safe_str(stdout).strip()
+    m = re.search(r"\d+.\d+.\d+", output)
     if m:
         ver = StrictVersion(m.group(0))
         log.debug('Git executable: "%s", version %s (parsed from: "%s")',
-                  settings.GIT_EXECUTABLE_PATH, ver, stdout.strip())
+                  settings.GIT_EXECUTABLE_PATH, ver, output)
         if ver < git_req_ver:
             log.error('Kallithea detected %s version %s, which is too old '
                       'for the system to function properly. '
@@ -618,68 +597,7 @@
             sys.exit(1)
     else:
         ver = StrictVersion('0.0.0')
-        log.warning('Error finding version number in "%s --version" stdout: %r',
-                    settings.GIT_EXECUTABLE_PATH, stdout.strip())
+        log.warning('Error finding version number in "%s --version" stdout:\n%s',
+                    settings.GIT_EXECUTABLE_PATH, output)
 
     return ver
-
-
-#===============================================================================
-# CACHE RELATED METHODS
-#===============================================================================
-
-# set cache regions for beaker so celery can utilise it
-def setup_cache_regions(settings):
-    # Create dict with just beaker cache configs with prefix stripped
-    cache_settings = {'regions': None}
-    prefix = 'beaker.cache.'
-    for key in settings:
-        if key.startswith(prefix):
-            name = key[len(prefix):]
-            cache_settings[name] = settings[key]
-    # Find all regions, apply defaults, and apply to beaker
-    if cache_settings['regions']:
-        for region in cache_settings['regions'].split(','):
-            region = region.strip()
-            prefix = region + '.'
-            region_settings = {}
-            for key in cache_settings:
-                if key.startswith(prefix):
-                    name = key[len(prefix):]
-                    region_settings[name] = cache_settings[key]
-            region_settings.setdefault('expire',
-                                       cache_settings.get('expire', '60'))
-            region_settings.setdefault('lock_dir',
-                                       cache_settings.get('lock_dir'))
-            region_settings.setdefault('data_dir',
-                                       cache_settings.get('data_dir'))
-            region_settings.setdefault('type',
-                                       cache_settings.get('type', 'memory'))
-            beaker.cache.cache_regions[region] = region_settings
-
-
-def conditional_cache(region, prefix, condition, func):
-    """
-
-    Conditional caching function use like::
-        def _c(arg):
-            #heavy computation function
-            return data
-
-        # depending from condition the compute is wrapped in cache or not
-        compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
-        return compute(arg)
-
-    :param region: name of cache region
-    :param prefix: cache region prefix
-    :param condition: condition for cache to be triggered, and return data cached
-    :param func: wrapped heavy function to compute
-
-    """
-    wrapped = func
-    if condition:
-        log.debug('conditional_cache: True, wrapping call of '
-                  'func: %s into %s region cache' % (region, func))
-        wrapped = _cache_decorate((prefix,), None, None, region)(func)
-
-    return wrapped
--- a/kallithea/lib/utils2.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/utils2.py	Sat May 02 21:20:43 2020 +0200
@@ -27,25 +27,37 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
 import binascii
 import datetime
+import json
 import os
-import pwd
 import re
 import time
-import urllib
+import urllib.parse
 
 import urlobject
 from tg.i18n import ugettext as _
 from tg.i18n import ungettext
 from webhelpers2.text import collapse, remove_formatting, strip_tags
 
-from kallithea.lib.compat import json
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str  # re-export
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 
 
+try:
+    import pwd
+except ImportError:
+    pass
+
+
+# mute pyflakes "imported but unused"
+assert ascii_bytes
+assert ascii_str
+assert safe_bytes
+assert safe_str
+assert LazyProperty
+
+
 def str2bool(_str):
     """
     returns True/False value from given string, it tries to translate the
@@ -71,7 +83,7 @@
     :param sep:
     :param strip:
     """
-    if isinstance(obj, (basestring)):
+    if isinstance(obj, (str)):
         lst = obj.split(sep)
         if strip:
             lst = [v.strip() for v in lst]
@@ -98,14 +110,12 @@
     :rtype: str
     :return: converted line according to mode
     """
-    from string import replace
-
     if mode == 0:
-        line = replace(line, '\r\n', '\n')
-        line = replace(line, '\r', '\n')
+        line = line.replace('\r\n', '\n')
+        line = line.replace('\r', '\n')
     elif mode == 1:
-        line = replace(line, '\r\n', '\r')
-        line = replace(line, '\n', '\r')
+        line = line.replace('\r\n', '\r')
+        line = line.replace('\n', '\r')
     elif mode == 2:
         line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
     return line
@@ -142,7 +152,7 @@
         unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
     """
     # Hexadecimal certainly qualifies as URL-safe.
-    return binascii.hexlify(os.urandom(20))
+    return ascii_str(binascii.hexlify(os.urandom(20)))
 
 
 def safe_int(val, default=None):
@@ -153,104 +163,13 @@
     :param val:
     :param default:
     """
-
     try:
         val = int(val)
     except (ValueError, TypeError):
         val = default
-
     return val
 
 
-def safe_unicode(str_, from_encoding=None):
-    """
-    safe unicode function. Does few trick to turn str_ into unicode
-
-    In case of UnicodeDecode error we try to return it with encoding detected
-    by chardet library if it fails fallback to unicode with errors replaced
-
-    :param str_: string to decode
-    :rtype: unicode
-    :returns: unicode object
-    """
-    if isinstance(str_, unicode):
-        return str_
-
-    if not from_encoding:
-        import kallithea
-        DEFAULT_ENCODINGS = aslist(kallithea.CONFIG.get('default_encoding',
-                                                        'utf-8'), sep=',')
-        from_encoding = DEFAULT_ENCODINGS
-
-    if not isinstance(from_encoding, (list, tuple)):
-        from_encoding = [from_encoding]
-
-    try:
-        return unicode(str_)
-    except UnicodeDecodeError:
-        pass
-
-    for enc in from_encoding:
-        try:
-            return unicode(str_, enc)
-        except UnicodeDecodeError:
-            pass
-
-    try:
-        import chardet
-        encoding = chardet.detect(str_)['encoding']
-        if encoding is None:
-            raise Exception()
-        return str_.decode(encoding)
-    except (ImportError, UnicodeDecodeError, Exception):
-        return unicode(str_, from_encoding[0], 'replace')
-
-
-def safe_str(unicode_, to_encoding=None):
-    """
-    safe str function. Does few trick to turn unicode_ into string
-
-    In case of UnicodeEncodeError we try to return it with encoding detected
-    by chardet library if it fails fallback to string with errors replaced
-
-    :param unicode_: unicode to encode
-    :rtype: str
-    :returns: str object
-    """
-
-    # if it's not basestr cast to str
-    if not isinstance(unicode_, basestring):
-        return str(unicode_)
-
-    if isinstance(unicode_, str):
-        return unicode_
-
-    if not to_encoding:
-        import kallithea
-        DEFAULT_ENCODINGS = aslist(kallithea.CONFIG.get('default_encoding',
-                                                        'utf-8'), sep=',')
-        to_encoding = DEFAULT_ENCODINGS
-
-    if not isinstance(to_encoding, (list, tuple)):
-        to_encoding = [to_encoding]
-
-    for enc in to_encoding:
-        try:
-            return unicode_.encode(enc)
-        except UnicodeEncodeError:
-            pass
-
-    try:
-        import chardet
-        encoding = chardet.detect(unicode_)['encoding']
-        if encoding is None:
-            raise UnicodeEncodeError()
-
-        return unicode_.encode(encoding)
-    except (ImportError, UnicodeEncodeError):
-        return unicode_.encode(to_encoding[0], 'replace')
-
-
 def remove_suffix(s, suffix):
     if s.endswith(suffix):
         s = s[:-1 * len(suffix)]
@@ -271,8 +190,8 @@
 
     :param prevdate: datetime object
     :param show_short_version: if it should approximate the date and return a shorter string
-    :rtype: unicode
-    :returns: unicode words describing age
+    :rtype: str
+    :returns: str words describing age
     """
     now = now or datetime.datetime.now()
     order = ['year', 'month', 'day', 'hour', 'minute', 'second']
@@ -331,12 +250,12 @@
 
     # Format the result
     fmt_funcs = {
-        'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
-        'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
-        'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
-        'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
-        'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
-        'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
+        'year': lambda d: ungettext('%d year', '%d years', d) % d,
+        'month': lambda d: ungettext('%d month', '%d months', d) % d,
+        'day': lambda d: ungettext('%d day', '%d days', d) % d,
+        'hour': lambda d: ungettext('%d hour', '%d hours', d) % d,
+        'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d,
+        'second': lambda d: ungettext('%d second', '%d seconds', d) % d,
     }
 
     for i, part in enumerate(order):
@@ -370,7 +289,7 @@
     Removes user:password from given url string
 
     :param uri:
-    :rtype: unicode
+    :rtype: str
     :returns: filtered list of strings
     """
     if not uri:
@@ -394,7 +313,7 @@
     else:
         host, port = uri[:cred_pos], uri[cred_pos + 1:]
 
-    return filter(None, [proto, host, port])
+    return [_f for _f in [proto, host, port] if _f]
 
 
 def credentials_filter(uri):
@@ -414,19 +333,19 @@
 
 def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None):
     parsed_url = urlobject.URLObject(prefix_url)
-    prefix = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
+    prefix = urllib.parse.unquote(parsed_url.path.rstrip('/'))
     try:
         system_user = pwd.getpwuid(os.getuid()).pw_name
-    except Exception: # TODO: support all systems - especially Windows
+    except NameError: # TODO: support all systems - especially Windows
         system_user = 'kallithea' # hardcoded default value ...
     args = {
         'scheme': parsed_url.scheme,
-        'user': safe_unicode(urllib.quote(safe_str(username or ''))),
+        'user': urllib.parse.quote(username or ''),
         'netloc': parsed_url.netloc + prefix,  # like "hostname:port/prefix" (with optional ":port" and "/prefix")
         'prefix': prefix, # undocumented, empty or starting with /
         'repo': repo_name,
         'repoid': str(repo_id),
-        'system_user': safe_unicode(system_user),
+        'system_user': system_user,
         'hostname': parsed_url.hostname,
     }
     url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl)
@@ -436,7 +355,7 @@
     if not url_obj.username:
         url_obj = url_obj.with_username(None)
 
-    return safe_unicode(url_obj)
+    return str(url_obj)
 
 
 def get_changeset_safe(repo, rev):
@@ -468,7 +387,7 @@
 
 def time_to_datetime(tm):
     if tm:
-        if isinstance(tm, basestring):
+        if isinstance(tm, str):
             try:
                 tm = float(tm)
             except ValueError:
@@ -577,10 +496,10 @@
     defined, else returns None.
     """
     from tg import tmpl_context
-    if hasattr(tmpl_context, 'authuser'):
-        return tmpl_context.authuser
-
-    return None
+    try:
+        return getattr(tmpl_context, 'authuser', None)
+    except TypeError:  # No object (name: context) has been registered for this thread
+        return None
 
 
 class OptionalAttr(object):
@@ -653,7 +572,7 @@
 
 
 def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
-    return _cleanstringsub('_', safe_str(s)).rstrip('_')
+    return _cleanstringsub('_', s).rstrip('_')
 
 
 def recursive_replace(str_, replace=' '):
@@ -694,7 +613,7 @@
 
 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
     while True:
-        ok = raw_input(prompt)
+        ok = input(prompt)
         if ok in ('y', 'ye', 'yes'):
             return True
         if ok in ('n', 'no', 'nop', 'nope'):
--- a/kallithea/lib/vcs/backends/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -9,7 +9,6 @@
     :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 """
 import os
-from pprint import pformat
 
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import VCSError
@@ -51,7 +50,7 @@
     """
     if alias not in settings.BACKENDS:
         raise VCSError("Given alias '%s' is not recognized! Allowed aliases:\n"
-            "%s" % (alias, pformat(settings.BACKENDS.keys())))
+            "%s" % (alias, '", "'.join(settings.BACKENDS)))
     backend_path = settings.BACKENDS[alias]
     klass = import_class(backend_path)
     return klass
--- a/kallithea/lib/vcs/backends/base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/base.py	Sat May 02 21:20:43 2020 +0200
@@ -13,9 +13,9 @@
 import itertools
 
 from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import (
-    ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
-from kallithea.lib.vcs.utils import author_email, author_name, safe_unicode
+from kallithea.lib.vcs.exceptions import (ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError,
+                                          NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
+from kallithea.lib.vcs.utils import author_email, author_name
 from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 
@@ -98,10 +98,6 @@
         """
         raise NotImplementedError
 
-    @property
-    def name_unicode(self):
-        return safe_unicode(self.name)
-
     @LazyProperty
     def owner(self):
         raise NotImplementedError
@@ -173,14 +169,9 @@
         """
         raise NotImplementedError
 
-    def __getslice__(self, i, j):
-        """
-        Returns a iterator of sliced repository
-        """
-        for rev in self.revisions[i:j]:
-            yield self.get_changeset(rev)
-
     def __getitem__(self, key):
+        if isinstance(key, slice):
+            return (self.get_changeset(rev) for rev in self.revisions[key])
         return self.get_changeset(key)
 
     def count(self):
@@ -267,8 +258,6 @@
         """
         Persists current changes made on this repository and returns newly
         created changeset.
-
-        :raises ``NothingChangedError``: if no changes has been made
         """
         raise NotImplementedError
 
@@ -329,9 +318,6 @@
         ``repository``
             repository object within which changeset exists
 
-        ``id``
-            may be ``raw_id`` or i.e. for mercurial's tip just ``tip``
-
         ``raw_id``
             raw changeset representation (i.e. full 40 length sha for git
             backend)
@@ -354,10 +340,10 @@
             combined list of ``Node`` objects
 
         ``author``
-            author of the changeset, as unicode
+            author of the changeset, as str
 
         ``message``
-            message of the changeset, as unicode
+            message of the changeset, as str
 
         ``parents``
             list of parent changesets
@@ -374,10 +360,9 @@
     def __repr__(self):
         return self.__str__()
 
-    def __unicode__(self):
-        return u'%s:%s' % (self.revision, self.short_id)
-
     def __eq__(self, other):
+        if type(self) is not type(other):
+            return False
         return self.raw_id == other.raw_id
 
     def __json__(self, with_file_list=False):
@@ -389,9 +374,9 @@
                 message=self.message,
                 date=self.date,
                 author=self.author,
-                added=[safe_unicode(el.path) for el in self.added],
-                changed=[safe_unicode(el.path) for el in self.changed],
-                removed=[safe_unicode(el.path) for el in self.removed],
+                added=[el.path for el in self.added],
+                changed=[el.path for el in self.changed],
+                removed=[el.path for el in self.removed],
             )
         else:
             return dict(
@@ -424,13 +409,6 @@
         raise NotImplementedError
 
     @LazyProperty
-    def id(self):
-        """
-        Returns string identifying this changeset.
-        """
-        raise NotImplementedError
-
-    @LazyProperty
     def raw_id(self):
         """
         Returns raw string identifying this changeset.
@@ -660,12 +638,12 @@
         """
         Returns dictionary with changeset's attributes and their values.
         """
-        data = get_dict_for_attrs(self, ['id', 'raw_id', 'short_id',
+        data = get_dict_for_attrs(self, ['raw_id', 'short_id',
             'revision', 'date', 'message'])
         data['author'] = {'name': self.author_name, 'email': self.author_email}
-        data['added'] = [safe_unicode(node.path) for node in self.added]
-        data['changed'] = [safe_unicode(node.path) for node in self.changed]
-        data['removed'] = [safe_unicode(node.path) for node in self.removed]
+        data['added'] = [node.path for node in self.added]
+        data['changed'] = [node.path for node in self.changed]
+        data['removed'] = [node.path for node in self.removed]
         return data
 
     @LazyProperty
@@ -936,18 +914,18 @@
                         "at %s" % (node.path, p))
 
         # Check nodes marked as changed
-        missing = set(self.changed)
-        not_changed = set(self.changed)
+        missing = set(node.path for node in self.changed)
+        not_changed = set(node.path for node in self.changed)
         if self.changed and not parents:
-            raise NodeDoesNotExistError(str(self.changed[0].path))
+            raise NodeDoesNotExistError(self.changed[0].path)
         for p in parents:
             for node in self.changed:
                 try:
                     old = p.get_node(node.path)
-                    missing.remove(node)
+                    missing.remove(node.path)
                     # if content actually changed, remove node from unchanged
                     if old.content != node.content:
-                        not_changed.remove(node)
+                        not_changed.remove(node.path)
                 except NodeDoesNotExistError:
                     pass
         if self.changed and missing:
@@ -956,7 +934,7 @@
 
         if self.changed and not_changed:
             raise NodeNotChangedError("Node at %s wasn't actually changed "
-                "since parents' changesets: %s" % (not_changed.pop().path,
+                "since parents' changesets: %s" % (not_changed.pop(),
                     parents)
             )
 
@@ -969,10 +947,10 @@
             for node in self.removed:
                 try:
                     p.get_node(node.path)
-                    really_removed.add(node)
+                    really_removed.add(node.path)
                 except ChangesetError:
                     pass
-        not_removed = set(self.removed) - really_removed
+        not_removed = list(set(node.path for node in self.removed) - really_removed)
         if not_removed:
             raise NodeDoesNotExistError("Cannot remove node at %s from "
                 "following parents: %s" % (not_removed[0], parents))
@@ -1046,7 +1024,7 @@
         return self
 
     def get_file_content(self, path):
-        return u''
+        return b''
 
     def get_file_size(self, path):
         return 0
--- a/kallithea/lib/vcs/backends/git/changeset.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/git/changeset.py	Sat May 02 21:20:43 2020 +0200
@@ -9,38 +9,36 @@
 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError
-from kallithea.lib.vcs.nodes import (
-    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
-from kallithea.lib.vcs.utils import date_fromtimestamp, safe_int, safe_str, safe_unicode
+from kallithea.lib.vcs.nodes import (AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode,
+                                     SubModuleNode)
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 
 
 class GitChangeset(BaseChangeset):
     """
-    Represents state of the repository at single revision.
+    Represents state of the repository at a revision.
     """
 
     def __init__(self, repository, revision):
         self._stat_modes = {}
         self.repository = repository
-        revision = safe_str(revision)
         try:
-            commit = self.repository._repo[revision]
+            commit = self.repository._repo[ascii_bytes(revision)]
             if isinstance(commit, objects.Tag):
                 revision = safe_str(commit.object[1])
                 commit = self.repository._repo.get_object(commit.object[1])
         except KeyError:
             raise RepositoryError("Cannot get object with id %s" % revision)
-        self.raw_id = revision
-        self.id = self.raw_id
+        self.raw_id = ascii_str(commit.id)
         self.short_id = self.raw_id[:12]
-        self._commit = commit
+        self._commit = commit  # a Dulwich Commit with .id
         self._tree_id = commit.tree
         self._committer_property = 'committer'
         self._author_property = 'author'
         self._date_property = 'commit_time'
         self._date_tz_property = 'commit_timezone'
-        self.revision = repository.revisions.index(revision)
+        self.revision = repository.revisions.index(self.raw_id)
 
         self.nodes = {}
         self._paths = {}
@@ -51,15 +49,15 @@
 
     @LazyProperty
     def message(self):
-        return safe_unicode(self._commit.message)
+        return safe_str(self._commit.message)
 
     @LazyProperty
     def committer(self):
-        return safe_unicode(getattr(self._commit, self._committer_property))
+        return safe_str(getattr(self._commit, self._committer_property))
 
     @LazyProperty
     def author(self):
-        return safe_unicode(getattr(self._commit, self._author_property))
+        return safe_str(getattr(self._commit, self._author_property))
 
     @LazyProperty
     def date(self):
@@ -80,7 +78,7 @@
     @LazyProperty
     def tags(self):
         _tags = []
-        for tname, tsha in self.repository.tags.iteritems():
+        for tname, tsha in self.repository.tags.items():
             if tsha == self.raw_id:
                 _tags.append(tname)
         return _tags
@@ -91,26 +89,16 @@
         # that might not make sense in Git where branches() is a better match
         # for the basic model
         heads = self.repository._heads(reverse=False)
-        ref = heads.get(self.raw_id)
+        ref = heads.get(self._commit.id)
         if ref:
-            return safe_unicode(ref)
+            return safe_str(ref)
 
     @LazyProperty
     def branches(self):
         heads = self.repository._heads(reverse=True)
-        return [b for b in heads if heads[b] == self.raw_id] # FIXME: Inefficient ... and returning None!
-
-    def _fix_path(self, path):
-        """
-        Paths are stored without trailing slash so we need to get rid off it if
-        needed.
-        """
-        if path.endswith('/'):
-            path = path.rstrip('/')
-        return path
+        return [safe_str(b) for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None!
 
     def _get_id_for_path(self, path):
-        path = safe_str(path)
         # FIXME: Please, spare a couple of minutes and make those codes cleaner;
         if path not in self._paths:
             path = path.strip('/')
@@ -124,11 +112,10 @@
             curdir = ''
 
             # initially extract things from root dir
-            for item, stat, id in tree.iteritems():
+            for item, stat, id in tree.items():
+                name = safe_str(item)
                 if curdir:
-                    name = '/'.join((curdir, item))
-                else:
-                    name = item
+                    name = '/'.join((curdir, name))
                 self._paths[name] = id
                 self._stat_modes[name] = stat
 
@@ -138,8 +125,9 @@
                 else:
                     curdir = dir
                 dir_id = None
-                for item, stat, id in tree.iteritems():
-                    if dir == item:
+                for item, stat, id in tree.items():
+                    name = safe_str(item)
+                    if dir == name:
                         dir_id = id
                 if dir_id:
                     # Update tree
@@ -150,17 +138,16 @@
                     raise ChangesetError('%s have not been found' % curdir)
 
                 # cache all items from the given traversed tree
-                for item, stat, id in tree.iteritems():
+                for item, stat, id in tree.items():
+                    name = safe_str(item)
                     if curdir:
-                        name = '/'.join((curdir, item))
-                    else:
-                        name = item
+                        name = '/'.join((curdir, name))
                     self._paths[name] = id
                     self._stat_modes[name] = stat
             if path not in self._paths:
                 raise NodeDoesNotExistError("There is no file nor directory "
                     "at the given path '%s' at revision %s"
-                    % (path, safe_str(self.short_id)))
+                    % (path, self.short_id))
         return self._paths[path]
 
     def _get_kind(self, path):
@@ -171,7 +158,7 @@
             return NodeKind.DIR
 
     def _get_filectx(self, path):
-        path = self._fix_path(path)
+        path = path.rstrip('/')
         if self._get_kind(path) != NodeKind.FILE:
             raise ChangesetError("File does not exist for revision %s at "
                 " '%s'" % (self.raw_id, path))
@@ -185,8 +172,8 @@
         """
         Returns list of parents changesets.
         """
-        return [self.repository.get_changeset(parent)
-                for parent in self._commit.parents]
+        return [self.repository.get_changeset(ascii_str(parent_id))
+                for parent_id in self._commit.parents]
 
     @LazyProperty
     def children(self):
@@ -194,17 +181,15 @@
         Returns list of children changesets.
         """
         rev_filter = settings.GIT_REV_FILTER
-        so, se = self.repository.run_git_command(
+        so = self.repository.run_git_command(
             ['rev-list', rev_filter, '--children']
         )
-
-        children = []
-        pat = re.compile(r'^%s' % self.raw_id)
-        for l in so.splitlines():
-            if pat.match(l):
-                childs = l.split(' ')[1:]
-                children.extend(childs)
-        return [self.repository.get_changeset(cs) for cs in children]
+        return [
+            self.repository.get_changeset(cs)
+            for parts in (l.split(' ') for l in so.splitlines())
+            if parts[0] == self.raw_id
+            for cs in parts[1:]
+        ]
 
     def next(self, branch=None):
         if branch and self.branch != branch:
@@ -243,9 +228,10 @@
                 return cs
 
     def diff(self, ignore_whitespace=True, context=3):
+        # Only used to feed diffstat
         rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
         rev2 = self
-        return ''.join(self.repository.get_diff(rev1, rev2,
+        return b''.join(self.repository.get_diff(rev1, rev2,
                                     ignore_whitespace=ignore_whitespace,
                                     context=context))
 
@@ -254,7 +240,6 @@
         Returns stat mode of the file at the given ``path``.
         """
         # ensure path is traversed
-        path = safe_str(path)
         self._get_id_for_path(path)
         return self._stat_modes[path]
 
@@ -290,17 +275,15 @@
         iterating commits.
         """
         self._get_filectx(path)
-        cs_id = safe_str(self.id)
-        f_path = safe_str(path)
 
         if limit is not None:
             cmd = ['log', '-n', str(safe_int(limit, 0)),
-                   '--pretty=format:%H', '-s', cs_id, '--', f_path]
+                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 
         else:
             cmd = ['log',
-                   '--pretty=format:%H', '-s', cs_id, '--', f_path]
-        so, se = self.repository.run_git_command(cmd)
+                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
+        so = self.repository.run_git_command(cmd)
         ids = re.findall(r'[0-9a-fA-F]{40}', so)
         return [self.repository.get_changeset(sha) for sha in ids]
 
@@ -312,31 +295,29 @@
         """
         self._get_filectx(path)
         from dulwich.walk import Walker
-        include = [self.id]
+        include = [self.raw_id]
         walker = Walker(self.repository._repo.object_store, include,
                         paths=[path], max_entries=1)
-        return [self.repository.get_changeset(sha)
-                for sha in (x.commit.id for x in walker)]
+        return [self.repository.get_changeset(ascii_str(x.commit.id))
+                for x in walker]
 
     def get_file_annotate(self, path):
         """
         Returns a generator of four element tuples with
             lineno, sha, changeset lazy loader and line
-
-        TODO: This function now uses os underlying 'git' command which is
-        generally not good. Should be replaced with algorithm iterating
-        commits.
         """
-        cmd = ['blame', '-l', '--root', '-r', self.id, '--', path]
+        # TODO: This function now uses os underlying 'git' command which is
+        # generally not good. Should be replaced with algorithm iterating
+        # commits.
+        cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
         # -l     ==> outputs long shas (and we need all 40 characters)
         # --root ==> doesn't put '^' character for boundaries
         # -r sha ==> blames for the given revision
-        so, se = self.repository.run_git_command(cmd)
+        so = self.repository.run_git_command(cmd)
 
         for i, blame_line in enumerate(so.split('\n')[:-1]):
-            ln_no = i + 1
             sha, line = re.split(r' ', blame_line, 1)
-            yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
+            yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
 
     def fill_archive(self, stream=None, kind='tgz', prefix=None,
                      subrepos=False):
@@ -353,12 +334,15 @@
 
         :raise ImproperArchiveTypeError: If given kind is wrong.
         :raise VcsError: If given stream is None
-
         """
-        allowed_kinds = settings.ARCHIVE_SPECS.keys()
+        allowed_kinds = settings.ARCHIVE_SPECS
         if kind not in allowed_kinds:
             raise ImproperArchiveTypeError('Archive kind not supported use one'
-                'of %s' % allowed_kinds)
+                'of %s' % ' '.join(allowed_kinds))
+
+        if stream is None:
+            raise VCSError('You need to pass in a valid stream for filling'
+                           ' with archival data')
 
         if prefix is None:
             prefix = '%s-%s' % (self.repository.name, self.short_id)
@@ -394,25 +378,30 @@
         popen.communicate()
 
     def get_nodes(self, path):
+        """
+        Returns combined ``DirNode`` and ``FileNode`` objects list representing
+        state of changeset at the given ``path``. If node at the given ``path``
+        is not instance of ``DirNode``, ChangesetError would be raised.
+        """
+
         if self._get_kind(path) != NodeKind.DIR:
             raise ChangesetError("Directory does not exist for revision %s at "
                 " '%s'" % (self.revision, path))
-        path = self._fix_path(path)
+        path = path.rstrip('/')
         id = self._get_id_for_path(path)
         tree = self.repository._repo[id]
         dirnodes = []
         filenodes = []
         als = self.repository.alias
-        for name, stat, id in tree.iteritems():
+        for name, stat, id in tree.items():
+            obj_path = safe_str(name)
             if path != '':
-                obj_path = '/'.join((path, name))
-            else:
-                obj_path = name
+                obj_path = '/'.join((path, obj_path))
             if objects.S_ISGITLINK(stat):
                 root_tree = self.repository._repo[self._tree_id]
-                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree['.gitmodules'][1]).data))
-                url = cf.get(('submodule', obj_path), 'url')
-                dirnodes.append(SubModuleNode(obj_path, url=url, changeset=id,
+                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree[b'.gitmodules'][1]).data))
+                url = ascii_str(cf.get(('submodule', obj_path), 'url'))
+                dirnodes.append(SubModuleNode(obj_path, url=url, changeset=ascii_str(id),
                                               alias=als))
                 continue
 
@@ -434,9 +423,11 @@
         return nodes
 
     def get_node(self, path):
-        if isinstance(path, unicode):
-            path = path.encode('utf-8')
-        path = self._fix_path(path)
+        """
+        Returns ``Node`` object from the given ``path``. If there is no node at
+        the given ``path``, ``ChangesetError`` would be raised.
+        """
+        path = path.rstrip('/')
         if path not in self.nodes:
             try:
                 id_ = self._get_id_for_path(path)
@@ -444,12 +435,12 @@
                 raise NodeDoesNotExistError("Cannot find one of parents' "
                     "directories for a given path: %s" % path)
 
-            _GL = lambda m: m and objects.S_ISGITLINK(m)
-            if _GL(self._stat_modes.get(path)):
+            stat = self._stat_modes.get(path)
+            if stat and objects.S_ISGITLINK(stat):
                 tree = self.repository._repo[self._tree_id]
-                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree['.gitmodules'][1]).data))
-                url = cf.get(('submodule', path), 'url')
-                node = SubModuleNode(path, url=url, changeset=id_,
+                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree[b'.gitmodules'][1]).data))
+                url = ascii_str(cf.get(('submodule', path), 'url'))
+                node = SubModuleNode(path, url=url, changeset=ascii_str(id_),
                                      alias=self.repository.alias)
             else:
                 obj = self.repository._repo.get_object(id_)
@@ -465,7 +456,7 @@
                     node._blob = obj
                 else:
                     raise NodeDoesNotExistError("There is no file nor directory "
-                        "at the given path '%s' at revision %s"
+                        "at the given path: '%s' at revision %s"
                         % (path, self.short_id))
             # cache node
             self.nodes[path] = node
@@ -480,16 +471,6 @@
         return list(added.union(modified).union(deleted))
 
     @LazyProperty
-    def _diff_name_status(self):
-        output = []
-        for parent in self.parents:
-            cmd = ['diff', '--name-status', parent.raw_id, self.raw_id,
-                   '--encoding=utf8']
-            so, se = self.repository.run_git_command(cmd)
-            output.append(so.strip())
-        return '\n'.join(output)
-
-    @LazyProperty
     def _changes_cache(self):
         added = set()
         modified = set()
@@ -503,15 +484,15 @@
             if isinstance(parent, EmptyChangeset):
                 oid = None
             else:
-                oid = _r[parent.raw_id].tree
-            changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
+                oid = _r[parent._commit.id].tree
+            changes = _r.object_store.tree_changes(oid, _r[self._commit.id].tree)
             for (oldpath, newpath), (_, _), (_, _) in changes:
                 if newpath and oldpath:
-                    modified.add(newpath)
+                    modified.add(safe_str(newpath))
                 elif newpath and not oldpath:
-                    added.add(newpath)
+                    added.add(safe_str(newpath))
                 elif not newpath and oldpath:
-                    deleted.add(oldpath)
+                    deleted.add(safe_str(oldpath))
         return added, modified, deleted
 
     def _get_paths_for_status(self, status):
--- a/kallithea/lib/vcs/backends/git/inmemory.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/git/inmemory.py	Sat May 02 21:20:43 2020 +0200
@@ -7,7 +7,7 @@
 
 from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset
 from kallithea.lib.vcs.exceptions import RepositoryError
-from kallithea.lib.vcs.utils import safe_str
+from kallithea.lib.vcs.utils import ascii_str, safe_bytes
 
 
 class GitInMemoryChangeset(BaseInMemoryChangeset):
@@ -39,7 +39,7 @@
         repo = self.repository._repo
         object_store = repo.object_store
 
-        ENCODING = "UTF-8"
+        ENCODING = b"UTF-8"  # TODO: should probably be kept in sync with safe_str/safe_bytes and vcs/conf/settings.py DEFAULT_ENCODINGS
 
         # Create tree and populates it with blobs
         commit_tree = self.parents[0] and repo[self.parents[0]._commit.tree] or \
@@ -47,7 +47,7 @@
         for node in self.added + self.changed:
             # Compute subdirs if needed
             dirpath, nodename = posixpath.split(node.path)
-            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
+            dirnames = safe_bytes(dirpath).split(b'/') if dirpath else []
             parent = commit_tree
             ancestors = [('', parent)]
 
@@ -68,13 +68,9 @@
             # for dirnames (in reverse order) [this only applies for nodes from added]
             new_trees = []
 
-            if not node.is_binary:
-                content = node.content.encode(ENCODING)
-            else:
-                content = node.content
-            blob = objects.Blob.from_string(content)
+            blob = objects.Blob.from_string(node.content)
 
-            node_path = node.name.encode(ENCODING)
+            node_path = safe_bytes(node.name)
             if dirnames:
                 # If there are trees which should be created we need to build
                 # them now (in reverse order)
@@ -104,7 +100,7 @@
             for tree in new_trees:
                 object_store.add_object(tree)
         for node in self.removed:
-            paths = node.path.split('/')
+            paths = safe_bytes(node.path).split(b'/')
             tree = commit_tree
             trees = [tree]
             # Traverse deep into the forest...
@@ -117,7 +113,7 @@
                 except KeyError:
                     break
             # Cut down the blob and all rotten trees on the way back...
-            for path, tree in reversed(zip(paths, trees)):
+            for path, tree in reversed(list(zip(paths, trees))):
                 del tree[path]
                 if tree:
                     # This tree still has elements - don't remove it or any
@@ -130,9 +126,9 @@
         commit = objects.Commit()
         commit.tree = commit_tree.id
         commit.parents = [p._commit.id for p in self.parents if p]
-        commit.author = commit.committer = safe_str(author)
+        commit.author = commit.committer = safe_bytes(author)
         commit.encoding = ENCODING
-        commit.message = safe_str(message)
+        commit.message = safe_bytes(message)
 
         # Compute date
         if date is None:
@@ -150,11 +146,10 @@
 
         object_store.add_object(commit)
 
-        ref = 'refs/heads/%s' % branch
+        # Update vcs repository object & recreate dulwich repo
+        ref = b'refs/heads/%s' % safe_bytes(branch)
         repo.refs[ref] = commit.id
-
-        # Update vcs repository object & recreate dulwich repo
-        self.repository.revisions.append(commit.id)
+        self.repository.revisions.append(ascii_str(commit.id))
         # invalidate parsed refs after commit
         self.repository._parsed_refs = self.repository._get_parsed_refs()
         tip = self.repository.get_changeset()
@@ -177,15 +172,15 @@
             return []
 
         def get_tree_for_dir(tree, dirname):
-            for name, mode, id in tree.iteritems():
+            for name, mode, id in tree.items():
                 if name == dirname:
                     obj = self.repository._repo[id]
                     if isinstance(obj, objects.Tree):
                         return obj
                     else:
                         raise RepositoryError("Cannot create directory %s "
-                        "at tree %s as path is occupied and is not a "
-                        "Tree" % (dirname, tree))
+                            "at tree %s as path is occupied and is not a "
+                            "Tree" % (dirname, tree))
             return None
 
         trees = []
--- a/kallithea/lib/vcs/backends/git/repository.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/git/repository.py	Sat May 02 21:20:43 2020 +0200
@@ -12,13 +12,15 @@
 import errno
 import logging
 import os
-import posixpath
 import re
 import time
-import urllib
-import urllib2
+import urllib.error
+import urllib.parse
+import urllib.request
 from collections import OrderedDict
 
+import mercurial.url  # import httpbasicauthhandler, httpdigestauthhandler
+import mercurial.util  # import url as hg_url
 from dulwich.config import ConfigFile
 from dulwich.objects import Tag
 from dulwich.repo import NotGitRepository, Repo
@@ -26,10 +28,9 @@
 from kallithea.lib.vcs import subprocessio
 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import (
-    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError)
-from kallithea.lib.vcs.utils import date_fromtimestamp, makedate, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import hg_url, httpbasicauthhandler, httpdigestauthhandler
+from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
+                                          TagDoesNotExistError)
+from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 
@@ -53,7 +54,7 @@
     def __init__(self, repo_path, create=False, src_url=None,
                  update_after_clone=False, bare=False):
 
-        self.path = safe_unicode(abspath(repo_path))
+        self.path = abspath(repo_path)
         self.repo = self._get_repo(create, src_url, update_after_clone, bare)
         self.bare = self.repo.bare
 
@@ -97,63 +98,54 @@
         return self._get_all_revisions()
 
     @classmethod
-    def _run_git_command(cls, cmd, **opts):
+    def _run_git_command(cls, cmd, cwd=None):
         """
-        Runs given ``cmd`` as git command and returns tuple
-        (stdout, stderr).
+        Runs given ``cmd`` as git command and returns output bytes in a tuple
+        (stdout, stderr) ... or raise RepositoryError.
 
         :param cmd: git command to be executed
-        :param opts: env options to pass into Subprocess command
+        :param cwd: passed directly to subprocess
         """
-
-        if '_bare' in opts:
-            _copts = []
-            del opts['_bare']
-        else:
-            _copts = ['-c', 'core.quotepath=false', ]
-        safe_call = False
-        if '_safe' in opts:
-            # no exc on failure
-            del opts['_safe']
-            safe_call = True
-
-        assert isinstance(cmd, list), cmd
-
-        gitenv = os.environ
         # need to clean fix GIT_DIR !
-        if 'GIT_DIR' in gitenv:
-            del gitenv['GIT_DIR']
+        gitenv = dict(os.environ)
+        gitenv.pop('GIT_DIR', None)
         gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 
-        _git_path = settings.GIT_EXECUTABLE_PATH
-        cmd = [_git_path] + _copts + cmd
+        assert isinstance(cmd, list), cmd
+        cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
+        try:
+            p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
+        except (EnvironmentError, OSError) as err:
+            # output from the failing process is in str(EnvironmentError)
+            msg = ("Couldn't run git command %s.\n"
+                   "Subprocess failed with '%s': %s\n" %
+                   (cmd, type(err).__name__, err)
+            ).strip()
+            log.error(msg)
+            raise RepositoryError(msg)
 
         try:
-            _opts = dict(
-                env=gitenv,
-                shell=False,
-            )
-            _opts.update(opts)
-            p = subprocessio.SubprocessIOChunker(cmd, **_opts)
-        except (EnvironmentError, OSError) as err:
-            tb_err = ("Couldn't run git command (%s).\n"
-                      "Original error was:%s\n" % (cmd, err))
-            log.error(tb_err)
-            if safe_call:
-                return '', err
-            else:
-                raise RepositoryError(tb_err)
-
-        try:
-            return ''.join(p.output), ''.join(p.error)
+            stdout = b''.join(p.output)
+            stderr = b''.join(p.error)
         finally:
             p.close()
+        # TODO: introduce option to make commands fail if they have any stderr output?
+        if stderr:
+            log.debug('stderr from %s:\n%s', cmd, stderr)
+        else:
+            log.debug('stderr from %s: None', cmd)
+        return stdout, stderr
 
     def run_git_command(self, cmd):
-        opts = {}
+        """
+        Runs given ``cmd`` as git command with cwd set to current repo.
+        Returns stdout as unicode str ... or raise RepositoryError.
+        """
+        cwd = None
         if os.path.isdir(self.path):
-            opts['cwd'] = self.path
-        return self._run_git_command(cmd, **opts)
+            cwd = self.path
+        stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
+        return safe_str(stdout)
 
     @classmethod
     def _check_url(cls, url):
@@ -166,7 +158,6 @@
         On failures it'll raise urllib2.HTTPError, exception is also thrown
         when the return code is non 200
         """
-
         # check first if it's not an local url
         if os.path.isdir(url) or url.startswith('file:'):
             return True
@@ -178,29 +169,30 @@
             url = url[url.find('+') + 1:]
 
         handlers = []
-        url_obj = hg_url(url)
+        url_obj = mercurial.util.url(safe_bytes(url))
         test_uri, authinfo = url_obj.authinfo()
-        url_obj.passwd = '*****'
+        if not test_uri.endswith(b'info/refs'):
+            test_uri = test_uri.rstrip(b'/') + b'/info/refs'
+
+        url_obj.passwd = b'*****'
         cleaned_uri = str(url_obj)
 
-        if not test_uri.endswith('info/refs'):
-            test_uri = test_uri.rstrip('/') + '/info/refs'
-
         if authinfo:
             # create a password manager
-            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
             passmgr.add_password(*authinfo)
 
-            handlers.extend((httpbasicauthhandler(passmgr),
-                             httpdigestauthhandler(passmgr)))
+            handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
+                             mercurial.url.httpdigestauthhandler(passmgr)))
 
-        o = urllib2.build_opener(*handlers)
+        o = urllib.request.build_opener(*handlers)
         o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 
-        q = {"service": 'git-upload-pack'}
-        qs = '?%s' % urllib.urlencode(q)
-        cu = "%s%s" % (test_uri, qs)
-        req = urllib2.Request(cu, None, {})
+        req = urllib.request.Request(
+            "%s?%s" % (
+                safe_str(test_uri),
+                urllib.parse.urlencode({"service": 'git-upload-pack'})
+            ))
 
         try:
             resp = o.open(req)
@@ -208,13 +200,13 @@
                 raise Exception('Return Code is not 200')
         except Exception as e:
             # means it cannot be cloned
-            raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
+            raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 
         # now detect if it's proper git repo
         gitdata = resp.read()
-        if 'service=git-upload-pack' not in gitdata:
-            raise urllib2.URLError(
-                "url [%s] does not look like an git" % (cleaned_uri))
+        if b'service=git-upload-pack' not in gitdata:
+            raise urllib.error.URLError(
+                "url [%s] does not look like an git" % cleaned_uri)
 
         return True
 
@@ -253,7 +245,7 @@
         rev_filter = settings.GIT_REV_FILTER
         cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
         try:
-            so, se = self.run_git_command(cmd)
+            so = self.run_git_command(cmd)
         except RepositoryError:
             # Can be raised for empty repositories
             return []
@@ -261,58 +253,56 @@
 
     def _get_all_revisions2(self):
         # alternate implementation using dulwich
-        includes = [x[1][0] for x in self._parsed_refs.iteritems()
-                    if x[1][1] != 'T']
+        includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items()
+                    if type_ != b'T']
         return [c.commit.id for c in self._repo.get_walker(include=includes)]
 
     def _get_revision(self, revision):
         """
-        For git backend we always return integer here. This way we ensure
-        that changeset's revision attribute would become integer.
+        Given any revision identifier, returns a 40 char string with revision hash.
         """
-
-        is_null = lambda o: len(o) == revision.count('0')
-
         if self._empty:
             raise EmptyRepositoryError("There are no changesets yet")
 
         if revision in (None, '', 'tip', 'HEAD', 'head', -1):
-            return self.revisions[-1]
+            revision = -1
 
-        is_bstr = isinstance(revision, (str, unicode))
-        if ((is_bstr and revision.isdigit() and len(revision) < 12)
-            or isinstance(revision, int) or is_null(revision)
-        ):
+        if isinstance(revision, int):
             try:
-                revision = self.revisions[int(revision)]
+                return self.revisions[revision]
             except IndexError:
-                msg = ("Revision %s does not exist for %s" % (revision, self))
+                msg = "Revision %r does not exist for %s" % (revision, self.name)
                 raise ChangesetDoesNotExistError(msg)
 
-        elif is_bstr:
-            # get by branch/tag name
-            _ref_revision = self._parsed_refs.get(revision)
-            if _ref_revision:  # and _ref_revision[1] in ['H', 'RH', 'T']:
-                return _ref_revision[0]
+        if isinstance(revision, str):
+            if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')):
+                try:
+                    return self.revisions[int(revision)]
+                except IndexError:
+                    msg = "Revision %r does not exist for %s" % (revision, self)
+                    raise ChangesetDoesNotExistError(msg)
 
-            _tags_shas = self.tags.values()
+            # get by branch/tag name
+            _ref_revision = self._parsed_refs.get(safe_bytes(revision))
+            if _ref_revision:  # and _ref_revision[1] in [b'H', b'RH', b'T']:
+                return ascii_str(_ref_revision[0])
+
+            if revision in self.revisions:
+                return revision
+
             # maybe it's a tag ? we don't have them in self.revisions
-            if revision in _tags_shas:
-                return _tags_shas[_tags_shas.index(revision)]
+            if revision in self.tags.values():
+                return revision
 
-            elif not SHA_PATTERN.match(revision) or revision not in self.revisions:
-                msg = ("Revision %s does not exist for %s" % (revision, self))
+            if SHA_PATTERN.match(revision):
+                msg = "Revision %r does not exist for %s" % (revision, self.name)
                 raise ChangesetDoesNotExistError(msg)
 
-        # Ensure we return full id
-        if not SHA_PATTERN.match(str(revision)):
-            raise ChangesetDoesNotExistError("Given revision %s not recognized"
-                % revision)
-        return revision
+        raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
 
     def get_ref_revision(self, ref_type, ref_name):
         """
-        Returns ``MercurialChangeset`` object representing repository's
+        Returns ``GitChangeset`` object representing repository's
         changeset at the given ``revision``.
         """
         return self._get_revision(ref_name)
@@ -327,20 +317,10 @@
         Returns normalized url. If schema is not given, would fall to
         filesystem (``file:///``) schema.
         """
-        url = safe_str(url)
         if url != 'default' and '://' not in url:
             url = ':///'.join(('file', url))
         return url
 
-    def get_hook_location(self):
-        """
-        returns absolute path to location where hooks are stored
-        """
-        loc = os.path.join(self.path, 'hooks')
-        if not self.bare:
-            loc = os.path.join(self.path, '.git', 'hooks')
-        return loc
-
     @LazyProperty
     def name(self):
         return os.path.basename(self.path)
@@ -367,23 +347,20 @@
 
     @LazyProperty
     def description(self):
-        undefined_description = u'unknown'
-        _desc = self._repo.get_description()
-        return safe_unicode(_desc or undefined_description)
+        return safe_str(self._repo.get_description() or b'unknown')
 
     @LazyProperty
     def contact(self):
-        undefined_contact = u'Unknown'
+        undefined_contact = 'Unknown'
         return undefined_contact
 
     @property
     def branches(self):
         if not self.revisions:
             return {}
-        sortkey = lambda ctx: ctx[0]
-        _branches = [(x[0], x[1][0])
-                     for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
-        return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
+        _branches = [(safe_str(key), ascii_str(sha))
+                     for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
+        return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False))
 
     @LazyProperty
     def closed_branches(self):
@@ -396,11 +373,9 @@
     def _get_tags(self):
         if not self.revisions:
             return {}
-
-        sortkey = lambda ctx: ctx[0]
-        _tags = [(x[0], x[1][0])
-                 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
-        return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
+        _tags = [(safe_str(key), ascii_str(sha))
+                 for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
+        return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True))
 
     def tag(self, name, user, revision=None, message=None, date=None,
             **kwargs):
@@ -420,7 +395,7 @@
         changeset = self.get_changeset(revision)
         message = message or "Added tag %s for commit %s" % (name,
             changeset.raw_id)
-        self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
+        self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
 
         self._parsed_refs = self._get_parsed_refs()
         self.tags = self._get_tags()
@@ -439,7 +414,8 @@
         """
         if name not in self.tags:
             raise TagDoesNotExistError("Tag %s does not exist" % name)
-        tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
+        # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git'
+        tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name)
         try:
             os.remove(tagpath)
             self._parsed_refs = self._get_parsed_refs()
@@ -459,18 +435,20 @@
         return self._get_parsed_refs()
 
     def _get_parsed_refs(self):
-        # cache the property
+        """Return refs as a dict, like:
+        { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] }
+        """
         _repo = self._repo
         refs = _repo.get_refs()
-        keys = [('refs/heads/', 'H'),
-                ('refs/remotes/origin/', 'RH'),
-                ('refs/tags/', 'T')]
+        keys = [(b'refs/heads/', b'H'),
+                (b'refs/remotes/origin/', b'RH'),
+                (b'refs/tags/', b'T')]
         _refs = {}
-        for ref, sha in refs.iteritems():
+        for ref, sha in refs.items():
             for k, type_ in keys:
                 if ref.startswith(k):
                     _key = ref[len(k):]
-                    if type_ == 'T':
+                    if type_ == b'T':
                         obj = _repo.get_object(sha)
                         if isinstance(obj, Tag):
                             sha = _repo.get_object(sha).object[1]
@@ -483,13 +461,13 @@
         heads = {}
 
         for key, val in refs.items():
-            for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
+            for ref_key in [b'refs/heads/', b'refs/remotes/origin/']:
                 if key.startswith(ref_key):
                     n = key[len(ref_key):]
-                    if n not in ['HEAD']:
+                    if n not in [b'HEAD']:
                         heads[n] = val
 
-        return heads if reverse else dict((y, x) for x, y in heads.iteritems())
+        return heads if reverse else dict((y, x) for x, y in heads.items())
 
     def get_changeset(self, revision=None):
         """
@@ -498,9 +476,7 @@
         """
         if isinstance(revision, GitChangeset):
             return revision
-        revision = self._get_revision(revision)
-        changeset = GitChangeset(repository=self, revision=revision)
-        return changeset
+        return GitChangeset(repository=self, revision=self._get_revision(revision))
 
     def get_changesets(self, start=None, end=None, start_date=None,
            end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -547,7 +523,7 @@
         else:
             cmd.append(settings.GIT_REV_FILTER)
 
-        revs = self.run_git_command(cmd)[0].splitlines()
+        revs = self.run_git_command(cmd).splitlines()
         start_pos = 0
         end_pos = len(revs)
         if start:
@@ -572,14 +548,15 @@
 
         revs = revs[start_pos:end_pos]
         if reverse:
-            revs = reversed(revs)
+            revs.reverse()
+
         return CollectionGenerator(self, revs)
 
     def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
                  context=3):
         """
-        Returns (git like) *diff*, as plain text. Shows changes introduced by
-        ``rev2`` since ``rev1``.
+        Returns (git like) *diff*, as plain bytes text. Shows changes
+        introduced by ``rev2`` since ``rev1``.
 
         :param rev1: Entry point from which diff is shown. Can be
           ``self.EMPTY_CHANGESET`` - in this case, patch showing all
@@ -633,14 +610,13 @@
         if path:
             cmd += ['--', path]
 
-        stdout, stderr = self.run_git_command(cmd)
-        # TODO: don't ignore stderr
+        stdout, stderr = self._run_git_command(cmd, cwd=self.path)
         # If we used 'show' command, strip first few lines (until actual diff
         # starts)
         if rev1 == self.EMPTY_CHANGESET:
-            parts = stdout.split('\ndiff ', 1)
+            parts = stdout.split(b'\ndiff ', 1)
             if len(parts) > 1:
-                stdout = 'diff ' + parts[1]
+                stdout = b'diff ' + parts[1]
         return stdout
 
     @LazyProperty
@@ -683,7 +659,7 @@
         Tries to pull changes from external location.
         """
         url = self._get_url(url)
-        so, se = self.run_git_command(['ls-remote', '-h', url])
+        so = self.run_git_command(['ls-remote', '-h', url])
         cmd = ['fetch', url, '--']
         for line in (x for x in so.splitlines()):
             sha, ref = line.split('\t')
@@ -721,7 +697,7 @@
         """
         if config_file is None:
             config_file = []
-        elif isinstance(config_file, basestring):
+        elif isinstance(config_file, str):
             config_file = [config_file]
 
         def gen_configs():
@@ -733,9 +709,10 @@
 
         for config in gen_configs():
             try:
-                return config.get(section, name)
+                value = config.get(section, name)
             except KeyError:
                 continue
+            return None if value is None else safe_str(value)
         return None
 
     def get_user_name(self, config_file=None):
--- a/kallithea/lib/vcs/backends/git/ssh.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/git/ssh.py	Sat May 02 21:20:43 2020 +0200
@@ -17,7 +17,6 @@
 
 from kallithea.lib.hooks import log_pull_action
 from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
 from kallithea.lib.vcs.backends.ssh import BaseSshHandler
 
 
@@ -33,15 +32,15 @@
         >>> import shlex
 
         >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).repo_name
-        u'foo bar'
+        'foo bar'
         >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).verb
         'git-upload-pack'
         >>> GitSshHandler.make(shlex.split(" git-upload-pack /blåbærgrød ")).repo_name # might not be necessary to support no quoting ... but we can
-        u'bl\xe5b\xe6rgr\xf8d'
+        'bl\xe5b\xe6rgr\xf8d'
         >>> GitSshHandler.make(shlex.split('''git-upload-pack "/foo'bar"''')).repo_name
-        u"foo'bar"
+        "foo'bar"
         >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).repo_name
-        u'foo'
+        'foo'
         >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).verb
         'git-receive-pack'
 
@@ -56,7 +55,7 @@
             ssh_command_parts[0] in ['git-upload-pack', 'git-receive-pack'] and
             ssh_command_parts[1].startswith('/')
         ):
-            return cls(safe_unicode(ssh_command_parts[1][1:]), ssh_command_parts[0])
+            return cls(ssh_command_parts[1][1:], ssh_command_parts[0])
 
         return None
 
@@ -70,7 +69,7 @@
             log_pull_action(ui=make_ui(), repo=self.db_repo.scm_instance._repo)
         else: # probably verb 'git-receive-pack', action 'push'
             if not self.allow_push:
-                self.exit('Push access to %r denied' % safe_str(self.repo_name))
+                self.exit('Push access to %r denied' % self.repo_name)
             # Note: push logging is handled by Git post-receive hook
 
         # git shell is not a real shell but use shell inspired quoting *inside* the argument.
--- a/kallithea/lib/vcs/backends/git/workdir.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/git/workdir.py	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,6 @@
 import re
 
+from kallithea.lib.utils2 import ascii_str, safe_str
 from kallithea.lib.vcs.backends.base import BaseWorkdir
 from kallithea.lib.vcs.exceptions import BranchDoesNotExistError, RepositoryError
 
@@ -7,9 +8,9 @@
 class GitWorkdir(BaseWorkdir):
 
     def get_branch(self):
-        headpath = self.repository._repo.refs.refpath('HEAD')
+        headpath = self.repository._repo.refs.refpath(b'HEAD')
         try:
-            content = open(headpath).read()
+            content = safe_str(open(headpath, 'rb').read())
             match = re.match(r'^ref: refs/heads/(?P<branch>.+)\n$', content)
             if match:
                 return match.groupdict()['branch']
@@ -20,7 +21,7 @@
             raise RepositoryError("Couldn't compute workdir's branch")
 
     def get_changeset(self):
-        wk_dir_id = self.repository._repo.refs.as_dict().get('HEAD')
+        wk_dir_id = ascii_str(self.repository._repo.refs.as_dict().get(b'HEAD'))
         return self.repository.get_changeset(wk_dir_id)
 
     def checkout_branch(self, branch=None):
--- a/kallithea/lib/vcs/backends/hg/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -9,6 +9,8 @@
     :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 """
 
+from kallithea.lib.vcs.utils import hgcompat
+
 from .changeset import MercurialChangeset
 from .inmemory import MercurialInMemoryChangeset
 from .repository import MercurialRepository
@@ -19,3 +21,5 @@
     'MercurialRepository', 'MercurialChangeset',
     'MercurialInMemoryChangeset', 'MercurialWorkdir',
 ]
+
+hgcompat.monkey_do()
--- a/kallithea/lib/vcs/backends/hg/changeset.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/changeset.py	Sat May 02 21:20:43 2020 +0200
@@ -1,41 +1,44 @@
 import os
 import posixpath
 
+import mercurial.archival
+import mercurial.node
+import mercurial.obsutil
+
 from kallithea.lib.vcs.backends.base import BaseChangeset
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
-from kallithea.lib.vcs.nodes import (
-    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
-from kallithea.lib.vcs.utils import date_fromtimestamp, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import archival, hex, obsutil
+from kallithea.lib.vcs.nodes import (AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode,
+                                     SubModuleNode)
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import get_dirs_for_path
 
 
 class MercurialChangeset(BaseChangeset):
     """
-    Represents state of the repository at the single revision.
+    Represents state of the repository at a revision.
     """
 
     def __init__(self, repository, revision):
         self.repository = repository
-        assert isinstance(revision, basestring), repr(revision)
-        self.raw_id = revision
-        self._ctx = repository._repo[revision]
+        assert isinstance(revision, str), repr(revision)
+        self._ctx = repository._repo[ascii_bytes(revision)]
+        self.raw_id = ascii_str(self._ctx.hex())
         self.revision = self._ctx._rev
         self.nodes = {}
 
     @LazyProperty
     def tags(self):
-        return map(safe_unicode, self._ctx.tags())
+        return [safe_str(tag) for tag in self._ctx.tags()]
 
     @LazyProperty
     def branch(self):
-        return safe_unicode(self._ctx.branch())
+        return safe_str(self._ctx.branch())
 
     @LazyProperty
     def branches(self):
-        return [safe_unicode(self._ctx.branch())]
+        return [safe_str(self._ctx.branch())]
 
     @LazyProperty
     def closesbranch(self):
@@ -47,17 +50,11 @@
 
     @LazyProperty
     def bumped(self):
-        try:
-            return self._ctx.phasedivergent()
-        except AttributeError: # renamed in Mercurial 4.6 (9fa874fb34e1)
-            return self._ctx.bumped()
+        return self._ctx.phasedivergent()
 
     @LazyProperty
     def divergent(self):
-        try:
-            return self._ctx.contentdivergent()
-        except AttributeError: # renamed in Mercurial 4.6 (8b2d7684407b)
-            return self._ctx.divergent()
+        return self._ctx.contentdivergent()
 
     @LazyProperty
     def extinct(self):
@@ -65,10 +62,7 @@
 
     @LazyProperty
     def unstable(self):
-        try:
-            return self._ctx.orphan()
-        except AttributeError: # renamed in Mercurial 4.6 (03039ff3082b)
-            return self._ctx.unstable()
+        return self._ctx.orphan()
 
     @LazyProperty
     def phase(self):
@@ -81,33 +75,30 @@
 
     @LazyProperty
     def successors(self):
-        successors = obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True)
-        if successors:
-            # flatten the list here handles both divergent (len > 1)
-            # and the usual case (len = 1)
-            successors = [hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()]
-
-        return successors
+        successors = mercurial.obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True)
+        # flatten the list here handles both divergent (len > 1)
+        # and the usual case (len = 1)
+        return [safe_str(mercurial.node.hex(n)[:12]) for sub in successors for n in sub if n != self._ctx.node()]
 
     @LazyProperty
     def predecessors(self):
-        return [hex(n)[:12] for n in obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())]
+        return [safe_str(mercurial.node.hex(n)[:12]) for n in mercurial.obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())]
 
     @LazyProperty
     def bookmarks(self):
-        return map(safe_unicode, self._ctx.bookmarks())
+        return [safe_str(bookmark) for bookmark in self._ctx.bookmarks()]
 
     @LazyProperty
     def message(self):
-        return safe_unicode(self._ctx.description())
+        return safe_str(self._ctx.description())
 
     @LazyProperty
     def committer(self):
-        return safe_unicode(self.author)
+        return safe_str(self.author)
 
     @LazyProperty
     def author(self):
-        return safe_unicode(self._ctx.user())
+        return safe_str(self._ctx.user())
 
     @LazyProperty
     def date(self):
@@ -127,7 +118,7 @@
 
     @LazyProperty
     def _file_paths(self):
-        return list(self._ctx)
+        return list(safe_str(f) for f in self._ctx)
 
     @LazyProperty
     def _dir_paths(self):
@@ -140,12 +131,6 @@
         return self._dir_paths + self._file_paths
 
     @LazyProperty
-    def id(self):
-        if self.last:
-            return u'tip'
-        return self.short_id
-
-    @LazyProperty
     def short_id(self):
         return self.raw_id[:12]
 
@@ -202,22 +187,11 @@
                 return cs
 
     def diff(self):
-        # Only used for feed diffstat
-        return ''.join(self._ctx.diff())
-
-    def _fix_path(self, path):
-        """
-        Paths are stored without trailing slash so we need to get rid off it if
-        needed. Also mercurial keeps filenodes as str so we need to decode
-        from unicode to str
-        """
-        if path.endswith('/'):
-            path = path.rstrip('/')
-
-        return safe_str(path)
+        # Only used to feed diffstat
+        return b''.join(self._ctx.diff())
 
     def _get_kind(self, path):
-        path = self._fix_path(path)
+        path = path.rstrip('/')
         if path in self._file_paths:
             return NodeKind.FILE
         elif path in self._dir_paths:
@@ -227,11 +201,11 @@
                 % (path))
 
     def _get_filectx(self, path):
-        path = self._fix_path(path)
+        path = path.rstrip('/')
         if self._get_kind(path) != NodeKind.FILE:
             raise ChangesetError("File does not exist for revision %s at "
                 " '%s'" % (self.raw_id, path))
-        return self._ctx.filectx(path)
+        return self._ctx.filectx(safe_bytes(path))
 
     def _extract_submodules(self):
         """
@@ -245,10 +219,10 @@
         Returns stat mode of the file at the given ``path``.
         """
         fctx = self._get_filectx(path)
-        if 'x' in fctx.flags():
-            return 0100755
+        if b'x' in fctx.flags():
+            return 0o100755
         else:
-            return 0100644
+            return 0o100644
 
     def get_file_content(self, path):
         """
@@ -280,7 +254,7 @@
         cnt = 0
         for cs in reversed([x for x in fctx.filelog()]):
             cnt += 1
-            hist.append(hex(fctx.filectx(cs).node()))
+            hist.append(mercurial.node.hex(fctx.filectx(cs).node()))
             if limit is not None and cnt == limit:
                 break
 
@@ -292,13 +266,10 @@
             lineno, sha, changeset lazy loader and line
         """
         annotations = self._get_filectx(path).annotate()
-        try:
-            annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations]
-        except AttributeError: # annotateline was introduced in Mercurial 4.6 (b33b91ca2ec2)
-            annotation_lines = [(aline.fctx, l) for aline, l in annotations]
-        for i, (fctx, l) in enumerate(annotation_lines):
-            sha = fctx.hex()
-            yield (i + 1, sha, lambda sha=sha, l=l: self.repository.get_changeset(sha), l)
+        annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations]
+        for i, (fctx, line) in enumerate(annotation_lines):
+            sha = ascii_str(fctx.hex())
+            yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
 
     def fill_archive(self, stream=None, kind='tgz', prefix=None,
                      subrepos=False):
@@ -316,11 +287,10 @@
         :raise ImproperArchiveTypeError: If given kind is wrong.
         :raise VcsError: If given stream is None
         """
-
-        allowed_kinds = settings.ARCHIVE_SPECS.keys()
+        allowed_kinds = settings.ARCHIVE_SPECS
         if kind not in allowed_kinds:
             raise ImproperArchiveTypeError('Archive kind not supported use one'
-                'of %s' % allowed_kinds)
+                'of %s' % ' '.join(allowed_kinds))
 
         if stream is None:
             raise VCSError('You need to pass in a valid stream for filling'
@@ -333,8 +303,8 @@
         elif prefix.strip() == '':
             raise VCSError("Prefix cannot be empty")
 
-        archival.archive(self.repository._repo, stream, self.raw_id,
-                         kind, prefix=prefix, subrepos=subrepos)
+        mercurial.archival.archive(self.repository._repo, stream, ascii_bytes(self.raw_id),
+                         safe_bytes(kind), prefix=safe_bytes(prefix), subrepos=subrepos)
 
     def get_nodes(self, path):
         """
@@ -346,8 +316,7 @@
         if self._get_kind(path) != NodeKind.DIR:
             raise ChangesetError("Directory does not exist for revision %s at "
                 " '%s'" % (self.revision, path))
-        path = self._fix_path(path)
-
+        path = path.rstrip('/')
         filenodes = [FileNode(f, changeset=self) for f in self._file_paths
             if os.path.dirname(f) == path]
         dirs = path == '' and '' or [d for d in self._dir_paths
@@ -356,18 +325,16 @@
             if os.path.dirname(d) == path]
 
         als = self.repository.alias
-        for k, vals in self._extract_submodules().iteritems():
+        for k, vals in self._extract_submodules().items():
             #vals = url,rev,type
             loc = vals[0]
             cs = vals[1]
             dirnodes.append(SubModuleNode(k, url=loc, changeset=cs,
                                           alias=als))
         nodes = dirnodes + filenodes
-        # cache nodes
         for node in nodes:
             self.nodes[node.path] = node
         nodes.sort()
-
         return nodes
 
     def get_node(self, path):
@@ -375,9 +342,7 @@
         Returns ``Node`` object from the given ``path``. If there is no node at
         the given ``path``, ``ChangesetError`` would be raised.
         """
-
-        path = self._fix_path(path)
-
+        path = path.rstrip('/')
         if path not in self.nodes:
             if path in self._file_paths:
                 node = FileNode(path, changeset=self)
@@ -406,21 +371,21 @@
         """
         Returns list of added ``FileNode`` objects.
         """
-        return AddedFileNodesGenerator([n for n in self.status[1]], self)
+        return AddedFileNodesGenerator([safe_str(n) for n in self.status.added], self)
 
     @property
     def changed(self):
         """
         Returns list of modified ``FileNode`` objects.
         """
-        return ChangedFileNodesGenerator([n for n in self.status[0]], self)
+        return ChangedFileNodesGenerator([safe_str(n) for n in self.status.modified], self)
 
     @property
     def removed(self):
         """
         Returns list of removed ``FileNode`` objects.
         """
-        return RemovedFileNodesGenerator([n for n in self.status[2]], self)
+        return RemovedFileNodesGenerator([safe_str(n) for n in self.status.removed], self)
 
     @LazyProperty
     def extra(self):
--- a/kallithea/lib/vcs/backends/hg/inmemory.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/inmemory.py	Sat May 02 21:20:43 2020 +0200
@@ -1,14 +1,17 @@
 import datetime
 
+import mercurial.context
+import mercurial.node
+
 from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset
 from kallithea.lib.vcs.exceptions import RepositoryError
-from kallithea.lib.vcs.utils.hgcompat import hex, memctx, memfilectx, tolocal
+from kallithea.lib.vcs.utils import ascii_str, safe_bytes, safe_str
 
 
 class MercurialInMemoryChangeset(BaseInMemoryChangeset):
 
     def commit(self, message, author, parents=None, branch=None, date=None,
-            **kwargs):
+               **kwargs):
         """
         Performs in-memory commit (doesn't check workdir in any way) and
         returns newly created ``Changeset``. Updates repository's
@@ -27,21 +30,22 @@
         """
         self.check_integrity(parents)
 
+        if not isinstance(message, str):
+            raise RepositoryError('message must be a str - got %r' % type(message))
+        if not isinstance(author, str):
+            raise RepositoryError('author must be a str - got %r' % type(author))
+
         from .repository import MercurialRepository
-        if not isinstance(message, unicode) or not isinstance(author, unicode):
-            raise RepositoryError('Given message and author needs to be '
-                                  'an <unicode> instance got %r & %r instead'
-                                  % (type(message), type(author)))
-
         if branch is None:
             branch = MercurialRepository.DEFAULT_BRANCH_NAME
-        kwargs['branch'] = branch
+        kwargs[b'branch'] = safe_bytes(branch)
 
-        def filectxfn(_repo, memctx, path):
+        def filectxfn(_repo, memctx, bytes_path):
             """
-            Marks given path as added/changed/removed in a given _repo. This is
-            for internal mercurial commit function.
+            Callback from Mercurial, returning ctx to commit for the given
+            path.
             """
+            path = safe_str(bytes_path)
 
             # check if this path is removed
             if path in (node.path for node in self.removed):
@@ -50,9 +54,8 @@
             # check if this path is added
             for node in self.added:
                 if node.path == path:
-                    return memfilectx(_repo, memctx, path=node.path,
-                        data=(node.content.encode('utf-8')
-                              if not node.is_binary else node.content),
+                    return mercurial.context.memfilectx(_repo, memctx, path=bytes_path,
+                        data=node.content,
                         islink=False,
                         isexec=node.is_executable,
                         copysource=False)
@@ -60,14 +63,13 @@
             # or changed
             for node in self.changed:
                 if node.path == path:
-                    return memfilectx(_repo, memctx, path=node.path,
-                        data=(node.content.encode('utf-8')
-                              if not node.is_binary else node.content),
+                    return mercurial.context.memfilectx(_repo, memctx, path=bytes_path,
+                        data=node.content,
                         islink=False,
                         isexec=node.is_executable,
                         copysource=False)
 
-            raise RepositoryError("Given path haven't been marked as added,"
+            raise RepositoryError("Given path haven't been marked as added, "
                                   "changed or removed (%s)" % path)
 
         parents = [None, None]
@@ -76,22 +78,21 @@
                 parents[i] = parent._ctx.node()
 
         if date and isinstance(date, datetime.datetime):
-            date = date.strftime('%a, %d %b %Y %H:%M:%S')
+            date = safe_bytes(date.strftime('%a, %d %b %Y %H:%M:%S'))
 
-        commit_ctx = memctx(repo=self.repository._repo,
+        commit_ctx = mercurial.context.memctx(
+            repo=self.repository._repo,
             parents=parents,
-            text='',
-            files=self.get_paths(),
+            text=b'',
+            files=[safe_bytes(x) for x in self.get_paths()],
             filectxfn=filectxfn,
-            user=author,
+            user=safe_bytes(author),
             date=date,
             extra=kwargs)
 
-        loc = lambda u: tolocal(u.encode('utf-8'))
-
         # injecting given _repo params
-        commit_ctx._text = loc(message)
-        commit_ctx._user = loc(author)
+        commit_ctx._text = safe_bytes(message)
+        commit_ctx._user = safe_bytes(author)
         commit_ctx._date = date
 
         # TODO: Catch exceptions!
@@ -100,9 +101,8 @@
         self._commit_ctx = commit_ctx  # For reference
         # Update vcs repository object & recreate mercurial _repo
         # new_ctx = self.repository._repo[node]
-        # new_tip = self.repository.get_changeset(new_ctx.hex())
-        new_id = hex(n)
-        self.repository.revisions.append(new_id)
+        # new_tip = ascii_str(self.repository.get_changeset(new_ctx.hex()))
+        self.repository.revisions.append(ascii_str(mercurial.node.hex(n)))
         self._repo = self.repository._get_repo(create=False)
         self.repository.branches = self.repository._get_branches()
         tip = self.repository.get_changeset()
--- a/kallithea/lib/vcs/backends/hg/repository.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/repository.py	Sat May 02 21:20:43 2020 +0200
@@ -13,16 +13,33 @@
 import logging
 import os
 import time
-import urllib
-import urllib2
+import urllib.error
+import urllib.parse
+import urllib.request
 from collections import OrderedDict
 
+import mercurial.commands
+import mercurial.error
+import mercurial.exchange
+import mercurial.hg
+import mercurial.hgweb
+import mercurial.httppeer
+import mercurial.localrepo
+import mercurial.match
+import mercurial.mdiff
+import mercurial.node
+import mercurial.patch
+import mercurial.scmutil
+import mercurial.sshpeer
+import mercurial.tags
+import mercurial.ui
+import mercurial.url
+import mercurial.util
+
 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
-from kallithea.lib.vcs.exceptions import (
-    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
-from kallithea.lib.vcs.utils import author_email, author_name, date_fromtimestamp, makedate, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import (
-    Abort, RepoError, RepoLookupError, clone, diffopts, get_contact, hex, hg_url, httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepo, match_exact, nullid, patch, peer, scmutil, sshpeer, tag, ui)
+from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
+                                          TagDoesNotExistError, VCSError)
+from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import abspath
 
@@ -60,9 +77,8 @@
             raise VCSError('Mercurial backend requires repository path to '
                            'be instance of <str> got %s instead' %
                            type(repo_path))
-
         self.path = abspath(repo_path)
-        self.baseui = baseui or ui.ui()
+        self.baseui = baseui or mercurial.ui.ui()
         # We've set path and ui, now we can set _repo itself
         self._repo = self._get_repo(create, src_url, update_after_clone)
 
@@ -115,14 +131,13 @@
             return {}
 
         bt = OrderedDict()
-        for bn, _heads, tip, isclosed in sorted(self._repo.branchmap().iterbranches()):
+        for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()):
             if isclosed:
                 if closed:
-                    bt[safe_unicode(bn)] = hex(tip)
+                    bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
             else:
                 if normal:
-                    bt[safe_unicode(bn)] = hex(tip)
-
+                    bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
         return bt
 
     @LazyProperty
@@ -136,11 +151,11 @@
         if self._empty:
             return {}
 
-        sortkey = lambda ctx: ctx[0]  # sort by name
-        _tags = [(safe_unicode(n), hex(h),) for n, h in
-                 self._repo.tags().items()]
-
-        return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
+        return OrderedDict(sorted(
+            ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()),
+            reverse=True,
+            key=lambda x: x[0],  # sort by name
+        ))
 
     def tag(self, name, user, revision=None, message=None, date=None,
             **kwargs):
@@ -165,12 +180,12 @@
                 changeset.short_id)
 
         if date is None:
-            date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')
+            date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
 
         try:
-            tag(self._repo, name, changeset._ctx.node(), message, local, user, date)
-        except Abort as e:
-            raise RepositoryError(e.message)
+            mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date)
+        except mercurial.error.Abort as e:
+            raise RepositoryError(e.args[0])
 
         # Reinitialize tags
         self.tags = self._get_tags()
@@ -194,14 +209,14 @@
         if message is None:
             message = "Removed tag %s" % name
         if date is None:
-            date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')
+            date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
         local = False
 
         try:
-            tag(self._repo, name, nullid, message, local, user, date)
+            mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.commands.nullid, safe_bytes(message), local, safe_bytes(user), date)
             self.tags = self._get_tags()
-        except Abort as e:
-            raise RepositoryError(e.message)
+        except mercurial.error.Abort as e:
+            raise RepositoryError(e.args[0])
 
     @LazyProperty
     def bookmarks(self):
@@ -214,14 +229,14 @@
         if self._empty:
             return {}
 
-        sortkey = lambda ctx: ctx[0]  # sort by name
-        _bookmarks = [(safe_unicode(n), hex(h),) for n, h in
-                 self._repo._bookmarks.items()]
-        return OrderedDict(sorted(_bookmarks, key=sortkey, reverse=True))
+        return OrderedDict(sorted(
+            ((safe_str(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()),
+            reverse=True,
+            key=lambda x: x[0],  # sort by name
+        ))
 
     def _get_all_revisions(self):
-
-        return [self._repo[x].hex() for x in self._repo.filtered('visible').changelog.revs()]
+        return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()]
 
     def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
                   context=3):
@@ -257,12 +272,12 @@
             self.get_changeset(rev1)
         self.get_changeset(rev2)
         if path:
-            file_filter = match_exact(path)
+            file_filter = mercurial.match.exact(path)
         else:
             file_filter = None
 
-        return ''.join(patch.diff(self._repo, rev1, rev2, match=file_filter,
-                          opts=diffopts(git=True,
+        return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter,
+                          opts=mercurial.mdiff.diffopts(git=True,
                                         showfunc=True,
                                         ignorews=ignore_whitespace,
                                         context=context)))
@@ -279,42 +294,46 @@
         when the return code is non 200
         """
         # check first if it's not an local url
-        if os.path.isdir(url) or url.startswith('file:'):
+        url = safe_bytes(url)
+        if os.path.isdir(url) or url.startswith(b'file:'):
             return True
 
-        if url.startswith('ssh:'):
+        if url.startswith(b'ssh:'):
             # in case of invalid uri or authentication issues, sshpeer will
             # throw an exception.
-            sshpeer.instance(repoui or ui.ui(), url, False).lookup('tip')
+            mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
             return True
 
         url_prefix = None
-        if '+' in url[:url.find('://')]:
-            url_prefix, url = url.split('+', 1)
+        if b'+' in url[:url.find(b'://')]:
+            url_prefix, url = url.split(b'+', 1)
 
         handlers = []
-        url_obj = hg_url(url)
+        url_obj = mercurial.util.url(url)
         test_uri, authinfo = url_obj.authinfo()
-        url_obj.passwd = '*****'
+        url_obj.passwd = b'*****'
         cleaned_uri = str(url_obj)
 
         if authinfo:
             # create a password manager
-            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
             passmgr.add_password(*authinfo)
 
-            handlers.extend((httpbasicauthhandler(passmgr),
-                             httpdigestauthhandler(passmgr)))
+            handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
+                             mercurial.url.httpdigestauthhandler(passmgr)))
 
-        o = urllib2.build_opener(*handlers)
+        o = urllib.request.build_opener(*handlers)
         o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                         ('Accept', 'application/mercurial-0.1')]
 
-        q = {"cmd": 'between'}
-        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
-        qs = '?%s' % urllib.urlencode(q)
-        cu = "%s%s" % (test_uri, qs)
-        req = urllib2.Request(cu, None, {})
+        req = urllib.request.Request(
+            "%s?%s" % (
+                test_uri,
+                urllib.parse.urlencode({
+                    'cmd': 'between',
+                    'pairs': "%s-%s" % ('0' * 40, '0' * 40),
+                })
+            ))
 
         try:
             resp = o.open(req)
@@ -322,14 +341,14 @@
                 raise Exception('Return Code is not 200')
         except Exception as e:
             # means it cannot be cloned
-            raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
+            raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 
         if not url_prefix: # skip svn+http://... (and git+... too)
             # now check if it's a proper hg repo
             try:
-                httppeer.instance(repoui or ui.ui(), url, False).lookup('tip')
+                mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
             except Exception as e:
-                raise urllib2.URLError(
+                raise urllib.error.URLError(
                     "url [%s] does not look like an hg repo org_exc: %s"
                     % (cleaned_uri, e))
 
@@ -345,26 +364,25 @@
         location at given clone_point. Additionally it'll make update to
         working copy accordingly to ``update_after_clone`` flag
         """
-
         try:
             if src_url:
-                url = safe_str(self._get_url(src_url))
+                url = safe_bytes(self._get_url(src_url))
                 opts = {}
                 if not update_after_clone:
                     opts.update({'noupdate': True})
                 MercurialRepository._check_url(url, self.baseui)
-                clone(self.baseui, url, self.path, **opts)
+                mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
 
                 # Don't try to create if we've already cloned repo
                 create = False
-            return localrepo.instance(self.baseui, self.path, create=create)
-        except (Abort, RepoError) as err:
+            return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create)
+        except (mercurial.error.Abort, mercurial.error.RepoError) as err:
             if create:
                 msg = "Cannot create repository at %s. Original error was %s" \
-                    % (self.path, err)
+                    % (self.name, err)
             else:
                 msg = "Not valid repository at %s. Original error was %s" \
-                    % (self.path, err)
+                    % (self.name, err)
             raise RepositoryError(msg)
 
     @LazyProperty
@@ -373,15 +391,13 @@
 
     @LazyProperty
     def description(self):
-        undefined_description = u'unknown'
-        _desc = self._repo.ui.config('web', 'description', None, untrusted=True)
-        return safe_unicode(_desc or undefined_description)
+        _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True)
+        return safe_str(_desc or b'unknown')
 
     @LazyProperty
     def contact(self):
-        undefined_contact = u'Unknown'
-        return safe_unicode(get_contact(self._repo.ui.config)
-                            or undefined_contact)
+        return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
+                            or b'Unknown')
 
     @LazyProperty
     def last_change(self):
@@ -404,39 +420,33 @@
 
     def _get_revision(self, revision):
         """
-        Gets an ID revision given as str. This will always return a full
-        40 char revision number
+        Given any revision identifier, returns a 40 char string with revision hash.
 
         :param revision: str or int or None
         """
-        if isinstance(revision, unicode):
-            revision = safe_str(revision)
-
         if self._empty:
             raise EmptyRepositoryError("There are no changesets yet")
 
         if revision in [-1, None]:
-            revision = 'tip'
+            revision = b'tip'
+        elif isinstance(revision, str):
+            revision = safe_bytes(revision)
 
         try:
             if isinstance(revision, int):
-                return self._repo[revision].hex()
-            try:
-                return scmutil.revsymbol(self._repo, revision).hex()
-            except AttributeError: # revsymbol was introduced in Mercurial 4.6
-                return self._repo[revision].hex()
-        except (IndexError, ValueError, RepoLookupError, TypeError):
-            msg = ("Revision %s does not exist for %s" % (revision, self))
+                return ascii_str(self._repo[revision].hex())
+            return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex())
+        except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError):
+            msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name)
             raise ChangesetDoesNotExistError(msg)
         except (LookupError, ):
-            msg = ("Ambiguous identifier `%s` for %s" % (revision, self))
+            msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name)
             raise ChangesetDoesNotExistError(msg)
 
     def get_ref_revision(self, ref_type, ref_name):
         """
         Returns revision number for the given reference.
         """
-        ref_name = safe_str(ref_name)
         if ref_type == 'rev' and not ref_name.strip('0'):
             return self.EMPTY_CHANGESET
         # lookup up the exact node id
@@ -451,17 +461,13 @@
         try:
             revs = self._repo.revs(rev_spec, ref_name, ref_name)
         except LookupError:
-            msg = ("Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name))
+            msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)
             raise ChangesetDoesNotExistError(msg)
-        except RepoLookupError:
-            msg = ("Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name))
+        except mercurial.error.RepoLookupError:
+            msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)
             raise ChangesetDoesNotExistError(msg)
         if revs:
-            try:
-                revision = revs.last()
-            except AttributeError:
-                # removed in hg 3.2
-                revision = revs[-1]
+            revision = revs.last()
         else:
             # TODO: just report 'not found'?
             revision = ref_name
@@ -469,39 +475,29 @@
         return self._get_revision(revision)
 
     def _get_archives(self, archive_name='tip'):
-        allowed = self.baseui.configlist("web", "allow_archive",
+        allowed = self.baseui.configlist(b"web", b"allow_archive",
                                          untrusted=True)
-        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
-            if i[0] in allowed or self._repo.ui.configbool("web",
-                                                           "allow" + i[0],
+        for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]:
+            if name in allowed or self._repo.ui.configbool(b"web",
+                                                           b"allow" + name,
                                                            untrusted=True):
-                yield {"type": i[0], "extension": i[1], "node": archive_name}
+                yield {"type": safe_str(name), "extension": ext, "node": archive_name}
 
     def _get_url(self, url):
         """
-        Returns normalized url. If schema is not given, would fall
-        to filesystem
-        (``file:///``) schema.
+        Returns normalized url. If schema is not given, fall back to
+        filesystem (``file:///``) schema.
         """
-        url = safe_str(url)
         if url != 'default' and '://' not in url:
-            url = "file:" + urllib.pathname2url(url)
+            url = "file:" + urllib.request.pathname2url(url)
         return url
 
-    def get_hook_location(self):
-        """
-        returns absolute path to location where hooks are stored
-        """
-        return os.path.join(self.path, '.hg', '.hgrc')
-
     def get_changeset(self, revision=None):
         """
         Returns ``MercurialChangeset`` object representing repository's
         changeset at the given ``revision``.
         """
-        revision = self._get_revision(revision)
-        changeset = MercurialChangeset(repository=self, revision=revision)
-        return changeset
+        return MercurialChangeset(repository=self, revision=self._get_revision(revision))
 
     def get_changesets(self, start=None, end=None, start_date=None,
                        end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -517,35 +513,35 @@
         :param reversed: return changesets in reversed order
         """
         start_raw_id = self._get_revision(start)
-        start_pos = self.revisions.index(start_raw_id) if start else None
+        start_pos = None if start is None else self.revisions.index(start_raw_id)
         end_raw_id = self._get_revision(end)
-        end_pos = self.revisions.index(end_raw_id) if end else None
+        end_pos = None if end is None else self.revisions.index(end_raw_id)
 
-        if None not in [start, end] and start_pos > end_pos:
+        if start_pos is not None and end_pos is not None and start_pos > end_pos:
             raise RepositoryError("Start revision '%s' cannot be "
                                   "after end revision '%s'" % (start, end))
 
-        if branch_name and branch_name not in self.allbranches.keys():
-            msg = ("Branch %s not found in %s" % (branch_name, self))
+        if branch_name and branch_name not in self.allbranches:
+            msg = "Branch %r not found in %s" % (branch_name, self.name)
             raise BranchDoesNotExistError(msg)
         if end_pos is not None:
             end_pos += 1
         # filter branches
         filter_ = []
         if branch_name:
-            filter_.append('branch("%s")' % safe_str(branch_name))
+            filter_.append(b'branch("%s")' % safe_bytes(branch_name))
         if start_date:
-            filter_.append('date(">%s")' % start_date)
+            filter_.append(b'date(">%s")' % safe_bytes(str(start_date)))
         if end_date:
-            filter_.append('date("<%s")' % end_date)
+            filter_.append(b'date("<%s")' % safe_bytes(str(end_date)))
         if filter_ or max_revisions:
             if filter_:
-                revspec = ' and '.join(filter_)
+                revspec = b' and '.join(filter_)
             else:
-                revspec = 'all()'
+                revspec = b'all()'
             if max_revisions:
-                revspec = 'limit(%s, %s)' % (revspec, max_revisions)
-            revisions = scmutil.revrange(self._repo, [revspec])
+                revspec = b'limit(%s, %d)' % (revspec, max_revisions)
+            revisions = mercurial.scmutil.revrange(self._repo, [revspec])
         else:
             revisions = self.revisions
 
@@ -553,7 +549,7 @@
         # would be to get rid of this function entirely and use revsets
         revs = list(revisions)[start_pos:end_pos]
         if reverse:
-            revs = reversed(revs)
+            revs.reverse()
 
         return CollectionGenerator(self, revs)
 
@@ -561,15 +557,10 @@
         """
         Tries to pull changes from external location.
         """
-        url = self._get_url(url)
-        other = peer(self._repo, {}, url)
+        other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url)))
         try:
-            # hg 3.2 moved push / pull to exchange module
-            from mercurial import exchange
-            exchange.pull(self._repo, other, heads=None, force=None)
-        except ImportError:
-            self._repo.pull(other, heads=None, force=None)
-        except Abort as err:
+            mercurial.exchange.pull(self._repo, other, heads=None, force=None)
+        except mercurial.error.Abort as err:
             # Propagate error but with vcs's type
             raise RepositoryError(str(err))
 
@@ -591,15 +582,16 @@
         """
         if config_file is None:
             config_file = []
-        elif isinstance(config_file, basestring):
+        elif isinstance(config_file, str):
             config_file = [config_file]
 
         config = self._repo.ui
         if config_file:
-            config = ui.ui()
+            config = mercurial.ui.ui()
             for path in config_file:
-                config.readconfig(path)
-        return config.config(section, name)
+                config.readconfig(safe_bytes(path))
+        value = config.config(safe_bytes(section), safe_bytes(name))
+        return value if value is None else safe_str(value)
 
     def get_user_name(self, config_file=None):
         """
--- a/kallithea/lib/vcs/backends/hg/ssh.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/ssh.py	Sat May 02 21:20:43 2020 +0200
@@ -14,18 +14,12 @@
 
 import logging
 
-from mercurial import hg
+import mercurial.hg
+import mercurial.wireprotoserver
 
 from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
 from kallithea.lib.vcs.backends.ssh import BaseSshHandler
-
-
-try:
-    from mercurial.wireprotoserver import sshserver
-except ImportError:
-    from mercurial.sshserver import sshserver # moved in Mercurial 4.6 (1bf5263fe5cc)
-
+from kallithea.lib.vcs.utils import safe_bytes
 
 
 log = logging.getLogger(__name__)
@@ -40,11 +34,11 @@
         >>> import shlex
 
         >>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name
-        u'foo bar'
+        'foo bar'
         >>> MercurialSshHandler.make(shlex.split(' hg -R blåbærgrød serve --stdio ')).repo_name
-        u'bl\xe5b\xe6rgr\xf8d'
+        'bl\xe5b\xe6rgr\xf8d'
         >>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name
-        u'foo"bar'
+        'foo"bar'
 
         >>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio'))
         >>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax
@@ -53,7 +47,7 @@
         >>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here
         """
         if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']:
-            return cls(safe_unicode(ssh_command_parts[2]))
+            return cls(ssh_command_parts[2])
 
         return None
 
@@ -61,9 +55,9 @@
         # Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui
         baseui = make_ui(repo_path=self.db_repo.repo_full_path)
         if not self.allow_push:
-            baseui.setconfig('hooks', 'pretxnopen._ssh_reject', 'python:kallithea.lib.hooks.rejectpush')
-            baseui.setconfig('hooks', 'prepushkey._ssh_reject', 'python:kallithea.lib.hooks.rejectpush')
+            baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
+            baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
 
-        repo = hg.repository(baseui, safe_str(self.db_repo.repo_full_path))
+        repo = mercurial.hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path))
         log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path)
-        sshserver(baseui, repo).serve_forever()
+        mercurial.wireprotoserver.sshserver(baseui, repo).serve_forever()
--- a/kallithea/lib/vcs/backends/hg/workdir.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/hg/workdir.py	Sat May 02 21:20:43 2020 +0200
@@ -1,15 +1,17 @@
+import mercurial.merge
+
 from kallithea.lib.vcs.backends.base import BaseWorkdir
 from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
-from kallithea.lib.vcs.utils.hgcompat import hg_merge
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_str
 
 
 class MercurialWorkdir(BaseWorkdir):
 
     def get_branch(self):
-        return self.repository._repo.dirstate.branch()
+        return safe_str(self.repository._repo.dirstate.branch())
 
     def get_changeset(self):
-        wk_dir_id = self.repository._repo[None].parents()[0].hex()
+        wk_dir_id = ascii_str(self.repository._repo[None].parents()[0].hex())
         return self.repository.get_changeset(wk_dir_id)
 
     def checkout_branch(self, branch=None):
@@ -19,4 +21,4 @@
             raise BranchDoesNotExistError
 
         raw_id = self.repository.branches[branch]
-        hg_merge.update(self.repository._repo, raw_id, False, False, None)
+        mercurial.merge.update(self.repository._repo, ascii_bytes(raw_id), False, False, None)
--- a/kallithea/lib/vcs/backends/ssh.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/backends/ssh.py	Sat May 02 21:20:43 2020 +0200
@@ -24,7 +24,7 @@
 import sys
 
 from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.utils2 import safe_str, set_hook_environment
+from kallithea.lib.utils2 import set_hook_environment
 from kallithea.model.db import Repository, User, UserSshKeys
 from kallithea.model.meta import Session
 
@@ -82,7 +82,7 @@
         elif HasPermissionAnyMiddleware('repository.read')(self.authuser, self.repo_name):
             self.allow_push = False
         else:
-            self.exit('Access to %r denied' % safe_str(self.repo_name))
+            self.exit('Access to %r denied' % self.repo_name)
 
         self.db_repo = Repository.get_by_repo_name(self.repo_name)
         if self.db_repo is None:
--- a/kallithea/lib/vcs/conf/settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/conf/settings.py	Sat May 02 21:20:43 2020 +0200
@@ -1,24 +1,7 @@
-import os
-import tempfile
-
 from kallithea.lib.vcs.utils import aslist
-from kallithea.lib.vcs.utils.paths import get_user_home
 
 
-abspath = lambda * p: os.path.abspath(os.path.join(*p))
-
-VCSRC_PATH = os.environ.get('VCSRC_PATH')
-
-if not VCSRC_PATH:
-    HOME_ = get_user_home()
-    if not HOME_:
-        HOME_ = tempfile.gettempdir()
-
-VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc')
-if os.path.isdir(VCSRC_PATH):
-    VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py')
-
-# list of default encoding used in safe_unicode/safe_str methods
+# list of default encoding used in safe_str/safe_bytes methods
 DEFAULT_ENCODINGS = aslist('utf-8')
 
 # path to git executable run by run_git_command function
--- a/kallithea/lib/vcs/exceptions.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/exceptions.py	Sat May 02 21:20:43 2020 +0200
@@ -30,10 +30,6 @@
     pass
 
 
-class BranchAlreadyExistError(RepositoryError):
-    pass
-
-
 class BranchDoesNotExistError(RepositoryError):
     pass
 
@@ -50,10 +46,6 @@
     pass
 
 
-class NothingChangedError(CommitError):
-    pass
-
-
 class NodeError(VCSError):
     pass
 
@@ -88,7 +80,3 @@
 
 class ImproperArchiveTypeError(VCSError):
     pass
-
-
-class CommandError(VCSError):
-    pass
--- a/kallithea/lib/vcs/nodes.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/nodes.py	Sat May 02 21:20:43 2020 +0200
@@ -9,13 +9,14 @@
     :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 """
 
+import functools
 import mimetypes
 import posixpath
 import stat
 
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError
-from kallithea.lib.vcs.utils import safe_str, safe_unicode
+from kallithea.lib.vcs.utils import safe_bytes, safe_str
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 
 
@@ -26,10 +27,10 @@
 
 
 class NodeState:
-    ADDED = u'added'
-    CHANGED = u'changed'
-    NOT_CHANGED = u'not changed'
-    REMOVED = u'removed'
+    ADDED = 'added'
+    CHANGED = 'changed'
+    NOT_CHANGED = 'not changed'
+    REMOVED = 'removed'
 
 
 class NodeGeneratorBase(object):
@@ -44,11 +45,9 @@
         self.cs = cs
         self.current_paths = current_paths
 
-    def __call__(self):
-        return [n for n in self]
-
-    def __getslice__(self, i, j):
-        for p in self.current_paths[i:j]:
+    def __getitem__(self, key):
+        assert isinstance(key, slice), key
+        for p in self.current_paths[key]:
             yield self.cs.get_node(p)
 
     def __len__(self):
@@ -81,11 +80,13 @@
         for p in self.current_paths:
             yield RemovedFileNode(path=p)
 
-    def __getslice__(self, i, j):
-        for p in self.current_paths[i:j]:
+    def __getitem__(self, key):
+        assert isinstance(key, slice), key
+        for p in self.current_paths[key]:
             yield RemovedFileNode(path=p)
 
 
+@functools.total_ordering
 class Node(object):
     """
     Simplest class representing file or directory on repository.  SCM backends
@@ -101,7 +102,7 @@
         if path.startswith('/'):
             raise NodeError("Cannot initialize Node objects with slash at "
                             "the beginning as only relative paths are supported")
-        self.path = safe_str(path.rstrip('/'))  # we store paths as str
+        self.path = path.rstrip('/')
         if path == '' and kind != NodeKind.DIR:
             raise NodeError("Only DirNode and its subclasses may be "
                             "initialized with empty path")
@@ -120,67 +121,34 @@
         return None
 
     @LazyProperty
-    def unicode_path(self):
-        return safe_unicode(self.path)
-
-    @LazyProperty
     def name(self):
         """
         Returns name of the node so if its path
         then only last part is returned.
         """
-        return safe_unicode(self.path.rstrip('/').split('/')[-1])
-
-    def _get_kind(self):
-        return self._kind
-
-    def _set_kind(self, kind):
-        if hasattr(self, '_kind'):
-            raise NodeError("Cannot change node's kind")
-        else:
-            self._kind = kind
-            # Post setter check (path's trailing slash)
-            if self.path.endswith('/'):
-                raise NodeError("Node's path cannot end with slash")
-
-    kind = property(_get_kind, _set_kind)
-
-    def __cmp__(self, other):
-        """
-        Comparator using name of the node, needed for quick list sorting.
-        """
-        kind_cmp = cmp(self.kind, other.kind)
-        if kind_cmp:
-            return kind_cmp
-        return cmp(self.name, other.name)
+        return self.path.rstrip('/').split('/')[-1]
 
     def __eq__(self, other):
-        for attr in ['name', 'path', 'kind']:
-            if getattr(self, attr) != getattr(other, attr):
-                return False
-        if self.is_file():
-            if self.content != other.content:
-                return False
-        else:
-            # For DirNode's check without entering each dir
-            self_nodes_paths = list(sorted(n.path for n in self.nodes))
-            other_nodes_paths = list(sorted(n.path for n in self.nodes))
-            if self_nodes_paths != other_nodes_paths:
-                return False
-        return True
+        if type(self) is not type(other):
+            return False
+        if self.kind != other.kind:
+            return False
+        if self.path != other.path:
+            return False
 
-    def __nq__(self, other):
-        return not self.__eq__(other)
+    def __lt__(self, other):
+        if self.kind < other.kind:
+            return True
+        if self.kind > other.kind:
+            return False
+        if self.path < other.path:
+            return True
+        if self.path > other.path:
+            return False
 
     def __repr__(self):
         return '<%s %r>' % (self.__class__.__name__, self.path)
 
-    def __str__(self):
-        return self.__repr__()
-
-    def __unicode__(self):
-        return self.name
-
     def get_parent_path(self):
         """
         Returns node's parent path or empty string if node is root.
@@ -258,8 +226,24 @@
             raise NodeError("Cannot use both content and changeset")
         super(FileNode, self).__init__(path, kind=NodeKind.FILE)
         self.changeset = changeset
+        if not isinstance(content, bytes) and content is not None:
+            # File content is one thing that inherently must be bytes ... but
+            # VCS module tries to be "user friendly" and support unicode ...
+            content = safe_bytes(content)
         self._content = content
-        self._mode = mode or 0100644
+        self._mode = mode or 0o100644
+
+    def __eq__(self, other):
+        eq = super(FileNode, self).__eq__(other)
+        if eq is not None:
+            return eq
+        return self.content == other.content
+
+    def __lt__(self, other):
+        lt = super(FileNode, self).__lt__(other)
+        if lt is not None:
+            return lt
+        return self.content < other.content
 
     @LazyProperty
     def mode(self):
@@ -273,25 +257,17 @@
             mode = self._mode
         return mode
 
-    def _get_content(self):
+    @property
+    def content(self):
+        """
+        Returns lazily byte content of the FileNode.
+        """
         if self.changeset:
             content = self.changeset.get_file_content(self.path)
         else:
             content = self._content
         return content
 
-    @property
-    def content(self):
-        """
-        Returns lazily content of the FileNode. If possible, would try to
-        decode content from UTF-8.
-        """
-        content = self._get_content()
-
-        if bool(content and '\0' in content):
-            return content
-        return safe_unicode(content)
-
     @LazyProperty
     def size(self):
         if self.changeset:
@@ -361,7 +337,7 @@
         """
         from pygments import lexers
         try:
-            lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
+            lexer = lexers.guess_lexer_for_filename(self.name, safe_str(self.content), stripnl=False)
         except lexers.ClassNotFound:
             lexer = lexers.TextLexer(stripnl=False)
         # returns first alias
@@ -409,8 +385,7 @@
         """
         Returns True if file has binary content.
         """
-        _bin = '\0' in self._get_content()
-        return _bin
+        return b'\0' in self.content
 
     def is_browser_compatible_image(self):
         return self.mimetype in [
@@ -488,10 +463,23 @@
         self.changeset = changeset
         self._nodes = nodes
 
-    @LazyProperty
-    def content(self):
-        raise NodeError("%s represents a dir and has no ``content`` attribute"
-            % self)
+    def __eq__(self, other):
+        eq = super(DirNode, self).__eq__(other)
+        if eq is not None:
+            return eq
+        # check without entering each dir
+        self_nodes_paths = list(sorted(n.path for n in self.nodes))
+        other_nodes_paths = list(sorted(n.path for n in self.nodes))
+        return self_nodes_paths == other_nodes_paths
+
+    def __lt__(self, other):
+        lt = super(DirNode, self).__lt__(other)
+        if lt is not None:
+            return lt
+        # check without entering each dir
+        self_nodes_paths = list(sorted(n.path for n in self.nodes))
+        other_nodes_paths = list(sorted(n.path for n in self.nodes))
+        return self_nodes_paths < other_nodes_paths
 
     @LazyProperty
     def nodes(self):
@@ -595,12 +583,13 @@
     size = 0
 
     def __init__(self, name, url, changeset=None, alias=None):
-        self.path = name
+        # Note: Doesn't call Node.__init__!
+        self.path = name.rstrip('/')
         self.kind = NodeKind.SUBMODULE
         self.alias = alias
         # we have to use emptyChangeset here since this can point to svn/git/hg
         # submodules we cannot get from repository
-        self.changeset = EmptyChangeset(str(changeset), alias=alias)
+        self.changeset = EmptyChangeset(changeset, alias=alias)
         self.url = url
 
     def __repr__(self):
@@ -613,5 +602,5 @@
         Returns name of the node so if its path
         then only last part is returned.
         """
-        org = safe_unicode(self.path.rstrip('/').split('/')[-1])
-        return u'%s @ %s' % (org, self.changeset.short_id)
+        org = self.path.rstrip('/').rsplit('/', 1)[-1]
+        return '%s @ %s' % (org, self.changeset.short_id)
--- a/kallithea/lib/vcs/subprocessio.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/subprocessio.py	Sat May 02 21:20:43 2020 +0200
@@ -44,7 +44,7 @@
         if type(source) in (type(''), bytes, bytearray):  # string-like
             self.bytes = bytes(source)
         else:  # can be either file pointer or file-like
-            if type(source) in (int, long):  # file pointer it is
+            if isinstance(source, int):  # file pointer it is
                 # converting file descriptor (int) stdin into file-like
                 source = os.fdopen(source, 'rb', 16384)
             # let's see if source is file-like by now
@@ -125,11 +125,7 @@
             if len(t) > ccm:
                 kr.clear()
                 kr.wait(2)
-                # # this only works on 2.7.x and up
-                # if not kr.wait(10):
-                #     raise Exception("Timed out while waiting for input to be read.")
-                # instead we'll use this
-                if len(t) > ccm + 3:
+                if not kr.wait(10):
                     raise IOError(
                         "Timed out while waiting for input from subprocess.")
             t.append(b)
@@ -178,7 +174,7 @@
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         while not len(self.data) and not self.worker.EOF.is_set():
             self.worker.data_added.clear()
             self.worker.data_added.wait(0.2)
@@ -225,17 +221,6 @@
         return not self.worker.keep_reading.is_set()
 
     @property
-    def done_reading_event(self):
-        """
-        Done_reading does not mean that the iterator's buffer is empty.
-        Iterator might have done reading from underlying source, but the read
-        chunks might still be available for serving through .next() method.
-
-        :returns: An threading.Event class instance.
-        """
-        return self.worker.EOF
-
-    @property
     def done_reading(self):
         """
         Done_reading does not mean that the iterator's buffer is empty.
@@ -286,7 +271,7 @@
 
     - We are multithreaded. Writing in and reading out, err are all sep threads.
     - We support concurrent (in and out) stream processing.
-    - The output is not a stream. It's a queue of read string (bytes, not unicode)
+    - The output is not a stream. It's a queue of read string (bytes, not str)
       chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
     - We are non-blocking in more respects than communicate()
       (reading from subprocess out pauses when internal buffer is full, but
@@ -367,18 +352,17 @@
             and returncode != 0
         ): # and it failed
             bg_out.stop()
-            out = ''.join(bg_out)
+            out = b''.join(bg_out)
             bg_err.stop()
-            err = ''.join(bg_err)
-            if (err.strip() == 'fatal: The remote end hung up unexpectedly' and
-                out.startswith('0034shallow ')
+            err = b''.join(bg_err)
+            if (err.strip() == b'fatal: The remote end hung up unexpectedly' and
+                out.startswith(b'0034shallow ')
             ):
                 # hack inspired by https://github.com/schacon/grack/pull/7
                 bg_out = iter([out])
                 _p = None
             elif err:
-                raise EnvironmentError(
-                    "Subprocess exited due to an error:\n" + err)
+                raise EnvironmentError("Subprocess exited due to an error: %s" % err)
             else:
                 raise EnvironmentError(
                     "Subprocess exited with non 0 ret code: %s" % returncode)
@@ -390,7 +374,7 @@
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         if self.process:
             returncode = self.process.poll()
             if (returncode is not None # process has terminated
@@ -400,7 +384,7 @@
                 self.error.stop()
                 err = ''.join(self.error)
                 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
-        return self.output.next()
+        return next(self.output)
 
     def throw(self, type, value=None, traceback=None):
         if self.output.length or not self.output.done_reading:
--- a/kallithea/lib/vcs/utils/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 """
 This module provides some useful tools for ``vcs`` like annotate/diff html
 output. It also includes some internal helpers.
@@ -25,7 +27,7 @@
     :param sep:
     :param strip:
     """
-    if isinstance(obj, (basestring)):
+    if isinstance(obj, str):
         lst = obj.split(sep)
         if strip:
             lst = [v.strip() for v in lst]
@@ -66,89 +68,107 @@
     return val
 
 
-def safe_unicode(str_, from_encoding=None):
+def safe_str(s):
     """
-    safe unicode function. Does few trick to turn str_ into unicode
-
-    In case of UnicodeDecode error we try to return it with encoding detected
-    by chardet library if it fails fallback to unicode with errors replaced
-
-    :param str_: string to decode
-    :rtype: unicode
-    :returns: unicode object
+    Safe unicode str function. Use a few tricks to turn s into str:
+    In case of UnicodeDecodeError with configured default encodings, try to
+    detect encoding with chardet library, then fall back to first encoding with
+    errors replaced.
     """
-    if isinstance(str_, unicode):
-        return str_
+    if isinstance(s, str):
+        return s
 
-    if not from_encoding:
-        from kallithea.lib.vcs.conf import settings
-        from_encoding = settings.DEFAULT_ENCODINGS
-
-    if not isinstance(from_encoding, (list, tuple)):
-        from_encoding = [from_encoding]
+    if not isinstance(s, bytes):  # use __str__ and don't expect UnicodeDecodeError
+        return str(s)
 
-    try:
-        return unicode(str_)
-    except UnicodeDecodeError:
-        pass
-
-    for enc in from_encoding:
+    from kallithea.lib.vcs.conf import settings
+    for enc in settings.DEFAULT_ENCODINGS:
         try:
-            return unicode(str_, enc)
+            return str(s, enc)
         except UnicodeDecodeError:
             pass
 
     try:
         import chardet
-        encoding = chardet.detect(str_)['encoding']
-        if encoding is None:
-            raise Exception()
-        return str_.decode(encoding)
-    except (ImportError, UnicodeDecodeError, Exception):
-        return unicode(str_, from_encoding[0], 'replace')
+        encoding = chardet.detect(s)['encoding']
+        if encoding is not None:
+            return s.decode(encoding)
+    except (ImportError, UnicodeDecodeError):
+        pass
+
+    return str(s, settings.DEFAULT_ENCODINGS[0], 'replace')
 
 
-def safe_str(unicode_, to_encoding=None):
+def safe_bytes(s):
     """
-    safe str function. Does few trick to turn unicode_ into string
-
-    In case of UnicodeEncodeError we try to return it with encoding detected
-    by chardet library if it fails fallback to string with errors replaced
-
-    :param unicode_: unicode to encode
-    :rtype: str
-    :returns: str object
+    Safe bytes function. Use a few tricks to turn s into bytes string:
+    In case of UnicodeEncodeError with configured default encodings, fall back
+    to first configured encoding with errors replaced.
     """
+    if isinstance(s, bytes):
+        return s
 
-    # if it's not basestr cast to str
-    if not isinstance(unicode_, basestring):
-        return str(unicode_)
-
-    if isinstance(unicode_, str):
-        return unicode_
+    assert isinstance(s, str), repr(s)  # bytes cannot coerse with __str__ or handle None or int
 
-    if not to_encoding:
-        from kallithea.lib.vcs.conf import settings
-        to_encoding = settings.DEFAULT_ENCODINGS
-
-    if not isinstance(to_encoding, (list, tuple)):
-        to_encoding = [to_encoding]
-
-    for enc in to_encoding:
+    from kallithea.lib.vcs.conf import settings
+    for enc in settings.DEFAULT_ENCODINGS:
         try:
-            return unicode_.encode(enc)
+            return s.encode(enc)
         except UnicodeEncodeError:
             pass
 
-    try:
-        import chardet
-        encoding = chardet.detect(unicode_)['encoding']
-        if encoding is None:
-            raise UnicodeEncodeError()
+    return s.encode(settings.DEFAULT_ENCODINGS[0], 'replace')
+
+
+def ascii_bytes(s):
+    """
+    Simple conversion from str to bytes, *assuming* all codepoints are
+    7-bit and it thus is pure ASCII.
+    Will fail badly with UnicodeError on invalid input.
+    This should be used where enocding and "safe" ambiguity should be avoided.
+    Where strings already have been encoded in other ways but still are unicode
+    string - for example to hex, base64, json, urlencoding, or are known to be
+    identifiers.
 
-        return unicode_.encode(encoding)
-    except (ImportError, UnicodeEncodeError):
-        return unicode_.encode(to_encoding[0], 'replace')
+    >>> ascii_bytes('a')
+    b'a'
+    >>> ascii_bytes(u'a')
+    b'a'
+    >>> ascii_bytes('å')
+    Traceback (most recent call last):
+    UnicodeEncodeError: 'ascii' codec can't encode character '\xe5' in position 0: ordinal not in range(128)
+    >>> ascii_bytes('å'.encode('utf8'))
+    Traceback (most recent call last):
+    AssertionError: b'\xc3\xa5'
+    """
+    assert isinstance(s, str), repr(s)
+    return s.encode('ascii')
+
+
+def ascii_str(s):
+    r"""
+    Simple conversion from bytes to str, *assuming* all codepoints are
+    7-bit and it thus is pure ASCII.
+    Will fail badly with UnicodeError on invalid input.
+    This should be used where enocding and "safe" ambiguity should be avoided.
+    Where strings are encoded but also in other ways are known to be ASCII, and
+    where a unicode string is wanted without caring about encoding. For example
+    to hex, base64, urlencoding, or are known to be identifiers.
+
+    >>> ascii_str(b'a')
+    'a'
+    >>> ascii_str(u'a')
+    Traceback (most recent call last):
+    AssertionError: 'a'
+    >>> ascii_str('å'.encode('utf8'))
+    Traceback (most recent call last):
+    UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 0: ordinal not in range(128)
+    >>> ascii_str(u'å')
+    Traceback (most recent call last):
+    AssertionError: 'å'
+    """
+    assert isinstance(s, bytes), repr(s)
+    return s.decode('ascii')
 
 
 # Regex taken from http://www.regular-expressions.info/email.html
@@ -178,7 +198,7 @@
     m = email_re.search(author)
     if m is None:
         return ''
-    return safe_str(m.group(0))
+    return m.group(0)
 
 
 def author_name(author):
--- a/kallithea/lib/vcs/utils/annotate.py	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,179 +0,0 @@
-import StringIO
-
-from pygments import highlight
-from pygments.formatters import HtmlFormatter
-
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.nodes import FileNode
-
-
-def annotate_highlight(filenode, annotate_from_changeset_func=None,
-        order=None, headers=None, **options):
-    """
-    Returns html portion containing annotated table with 3 columns: line
-    numbers, changeset information and pygmentized line of code.
-
-    :param filenode: FileNode object
-    :param annotate_from_changeset_func: function taking changeset and
-      returning single annotate cell; needs break line at the end
-    :param order: ordered sequence of ``ls`` (line numbers column),
-      ``annotate`` (annotate column), ``code`` (code column); Default is
-      ``['ls', 'annotate', 'code']``
-    :param headers: dictionary with headers (keys are whats in ``order``
-      parameter)
-    """
-    options['linenos'] = True
-    formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
-        headers=headers,
-        annotate_from_changeset_func=annotate_from_changeset_func, **options)
-    lexer = filenode.lexer
-    highlighted = highlight(filenode.content, lexer, formatter)
-    return highlighted
-
-
-class AnnotateHtmlFormatter(HtmlFormatter):
-
-    def __init__(self, filenode, annotate_from_changeset_func=None,
-            order=None, **options):
-        """
-        If ``annotate_from_changeset_func`` is passed it should be a function
-        which returns string from the given changeset. For example, we may pass
-        following function as ``annotate_from_changeset_func``::
-
-            def changeset_to_anchor(changeset):
-                return '<a href="/changesets/%s/">%s</a>\n' % \
-                       (changeset.id, changeset.id)
-
-        :param annotate_from_changeset_func: see above
-        :param order: (default: ``['ls', 'annotate', 'code']``); order of
-          columns;
-        :param options: standard pygment's HtmlFormatter options, there is
-          extra option tough, ``headers``. For instance we can pass::
-
-             formatter = AnnotateHtmlFormatter(filenode, headers={
-                'ls': '#',
-                'annotate': 'Annotate',
-                'code': 'Code',
-             })
-
-        """
-        super(AnnotateHtmlFormatter, self).__init__(**options)
-        self.annotate_from_changeset_func = annotate_from_changeset_func
-        self.order = order or ('ls', 'annotate', 'code')
-        headers = options.pop('headers', None)
-        if headers and not ('ls' in headers and 'annotate' in headers and
-            'code' in headers
-        ):
-            raise ValueError("If headers option dict is specified it must "
-                "all 'ls', 'annotate' and 'code' keys")
-        self.headers = headers
-        if isinstance(filenode, FileNode):
-            self.filenode = filenode
-        else:
-            raise VCSError("This formatter expect FileNode parameter, not %r"
-                % type(filenode))
-
-    def annotate_from_changeset(self, changeset):
-        """
-        Returns full html line for single changeset per annotated line.
-        """
-        if self.annotate_from_changeset_func:
-            return self.annotate_from_changeset_func(changeset)
-        else:
-            return ''.join((changeset.id, '\n'))
-
-    def _wrap_tablelinenos(self, inner):
-        dummyoutfile = StringIO.StringIO()
-        lncount = 0
-        for t, line in inner:
-            if t:
-                lncount += 1
-            dummyoutfile.write(line)
-
-        fl = self.linenostart
-        mw = len(str(lncount + fl - 1))
-        sp = self.linenospecial
-        st = self.linenostep
-        la = self.lineanchors
-        aln = self.anchorlinenos
-        if sp:
-            lines = []
-
-            for i in range(fl, fl + lncount):
-                if i % st == 0:
-                    if i % sp == 0:
-                        if aln:
-                            lines.append('<a href="#%s-%d" class="special">'
-                                         '%*d</a>' %
-                                         (la, i, mw, i))
-                        else:
-                            lines.append('<span class="special">'
-                                         '%*d</span>' % (mw, i))
-                    else:
-                        if aln:
-                            lines.append('<a href="#%s-%d">'
-                                         '%*d</a>' % (la, i, mw, i))
-                        else:
-                            lines.append('%*d' % (mw, i))
-                else:
-                    lines.append('')
-            ls = '\n'.join(lines)
-        else:
-            lines = []
-            for i in range(fl, fl + lncount):
-                if i % st == 0:
-                    if aln:
-                        lines.append('<a href="#%s-%d">%*d</a>'
-                                     % (la, i, mw, i))
-                    else:
-                        lines.append('%*d' % (mw, i))
-                else:
-                    lines.append('')
-            ls = '\n'.join(lines)
-
-        annotate_changesets = [tup[1] for tup in self.filenode.annotate]
-        # If pygments cropped last lines break we need do that too
-        ln_cs = len(annotate_changesets)
-        ln_ = len(ls.splitlines())
-        if ln_cs > ln_:
-            annotate_changesets = annotate_changesets[:ln_ - ln_cs]
-        annotate = ''.join((self.annotate_from_changeset(changeset)
-            for changeset in annotate_changesets))
-        # in case you wonder about the seemingly redundant <div> here:
-        # since the content in the other cell also is wrapped in a div,
-        # some browsers in some configurations seem to mess up the formatting.
-        '''
-        yield 0, ('<table class="%stable">' % self.cssclass +
-                  '<tr><td class="linenos"><div class="linenodiv"><pre>' +
-                  ls + '</pre></div></td>' +
-                  '<td class="code">')
-        yield 0, dummyoutfile.getvalue()
-        yield 0, '</td></tr></table>'
-
-        '''
-        headers_row = []
-        if self.headers:
-            headers_row = ['<tr class="annotate-header">']
-            for key in self.order:
-                td = ''.join(('<td>', self.headers[key], '</td>'))
-                headers_row.append(td)
-            headers_row.append('</tr>')
-
-        body_row_start = ['<tr>']
-        for key in self.order:
-            if key == 'ls':
-                body_row_start.append(
-                    '<td class="linenos"><div class="linenodiv"><pre>' +
-                    ls + '</pre></div></td>')
-            elif key == 'annotate':
-                body_row_start.append(
-                    '<td class="annotate"><div class="annotatediv"><pre>' +
-                    annotate + '</pre></div></td>')
-            elif key == 'code':
-                body_row_start.append('<td class="code">')
-        yield 0, ('<table class="%stable">' % self.cssclass +
-                  ''.join(headers_row) +
-                  ''.join(body_row_start)
-                  )
-        yield 0, dummyoutfile.getvalue()
-        yield 0, '</td></tr></table>'
--- a/kallithea/lib/vcs/utils/archivers.py	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    vcs.utils.archivers
-    ~~~~~~~~~~~~~~~~~~~
-
-    set of archiver functions for creating archives from repository content
-
-    :created_on: Jan 21, 2011
-    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
-"""
-
-
-class BaseArchiver(object):
-
-    def __init__(self):
-        self.archive_file = self._get_archive_file()
-
-    def addfile(self):
-        """
-        Adds a file to archive container
-        """
-        pass
-
-    def close(self):
-        """
-        Closes and finalizes operation of archive container object
-        """
-        self.archive_file.close()
-
-    def _get_archive_file(self):
-        """
-        Returns container for specific archive
-        """
-        raise NotImplementedError()
-
-
-class TarArchiver(BaseArchiver):
-    pass
-
-
-class Tbz2Archiver(BaseArchiver):
-    pass
-
-
-class TgzArchiver(BaseArchiver):
-    pass
-
-
-class ZipArchiver(BaseArchiver):
-    pass
-
-
-def get_archiver(self, kind):
-    """
-    Returns instance of archiver class specific to given kind
-
-    :param kind: archive kind
-    """
-
-    archivers = {
-        'tar': TarArchiver,
-        'tbz2': Tbz2Archiver,
-        'tgz': TgzArchiver,
-        'zip': ZipArchiver,
-    }
-
-    return archivers[kind]()
--- a/kallithea/lib/vcs/utils/fakemod.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/fakemod.py	Sat May 02 21:20:43 2020 +0200
@@ -9,5 +9,5 @@
     """
     module = imp.new_module(name)
     module.__file__ = path
-    execfile(path, module.__dict__)
+    exec(compile(open(path, "rb").read(), path, 'exec'), module.__dict__)
     return module
--- a/kallithea/lib/vcs/utils/helpers.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/helpers.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,6 @@
 """
 Utilities aimed to help achieve mostly basic tasks.
 """
-from __future__ import division
 
 import datetime
 import os
@@ -33,16 +32,14 @@
     if not os.path.isdir(path):
         raise VCSError("Given path %s is not a directory" % path)
 
-    def get_scms(path):
-        return [(scm, path) for scm in get_scms_for_path(path)]
-
-    found_scms = get_scms(path)
-    while not found_scms and search_up:
+    while True:
+        found_scms = [(scm, path) for scm in get_scms_for_path(path)]
+        if found_scms or not search_up:
+            break
         newpath = abspath(path, '..')
         if newpath == path:
             break
         path = newpath
-        found_scms = get_scms(path)
 
     if len(found_scms) > 1:
         for scm in found_scms:
@@ -133,7 +130,7 @@
         >>> parse_changesets('aaabbb')
         {'start': None, 'main': 'aaabbb', 'end': None}
         >>> parse_changesets('aaabbb..cccddd')
-        {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
+        {'start': 'aaabbb', 'end': 'cccddd', 'main': None}
 
     """
     text = text.strip()
--- a/kallithea/lib/vcs/utils/hgcompat.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/hgcompat.py	Sat May 02 21:20:43 2020 +0200
@@ -2,46 +2,16 @@
 Mercurial libs compatibility
 """
 
-# Mercurial 5.0 550a172a603b renamed memfilectx argument `copied` to `copysource`
-import inspect
-
-import mercurial
-from mercurial import archival, config, demandimport, discovery, httppeer, localrepo
-from mercurial import merge as hg_merge
-from mercurial import obsutil, patch, scmutil, sshpeer, ui, unionrepo
-from mercurial.commands import clone, nullid, pull
-from mercurial.context import memctx, memfilectx
-from mercurial.discovery import findcommonoutgoing
-from mercurial.encoding import tolocal
-from mercurial.error import Abort, RepoError, RepoLookupError
-from mercurial.hg import peer
-from mercurial.hgweb import hgweb_mod
-from mercurial.hgweb.common import get_contact
-from mercurial.match import exact as match_exact
-from mercurial.match import match
-from mercurial.mdiff import diffopts
-from mercurial.node import hex, nullrev
-from mercurial.scmutil import revrange
-from mercurial.tags import tag
-from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
-from mercurial.util import url as hg_url
+import mercurial.encoding
+import mercurial.localrepo
 
 
-# patch demandimport, due to bug in mercurial when it always triggers demandimport.enable()
-demandimport.enable = lambda *args, **kwargs: 1
-
-
-# workaround for 3.3 94ac64bcf6fe and not calling largefiles reposetup correctly
-localrepo.localrepository._lfstatuswriters = [lambda *msg, **opts: None]
-# 3.5 7699d3212994 added the invariant that repo.lfstatus must exist before hitting overridearchive
-localrepo.localrepository.lfstatus = False
+def monkey_do():
+    """Apply some Mercurial monkey patching"""
+    # workaround for 3.3 94ac64bcf6fe and not calling largefiles reposetup correctly, and test_archival failing
+    mercurial.localrepo.localrepository._lfstatuswriters = [lambda *msg, **opts: None]
+    # 3.5 7699d3212994 added the invariant that repo.lfstatus must exist before hitting overridearchive
+    mercurial.localrepo.localrepository.lfstatus = False
 
-if inspect.getargspec(memfilectx.__init__).args[7] != 'copysource':
-    assert inspect.getargspec(memfilectx.__init__).args[7] == 'copied', inspect.getargspec(memfilectx.__init__).args
-    __org_memfilectx_ = memfilectx
-    memfilectx = lambda repo, changectx, path, data, islink=False, isexec=False, copysource=None: \
-        __org_memfilectx_(repo, changectx, path, data, islink=islink, isexec=isexec, copied=copysource)
-
-# Mercurial 5.0 dropped exact argument for match in 635a12c53ea6, and 0531dff73d0b made the exact function stable with a single parameter
-if inspect.getargspec(match_exact).args[0] != 'files':
-    match_exact = lambda path: match(None, '', [path], exact=True)
+    # Minimize potential impact from custom configuration
+    mercurial.encoding.environ[b'HGPLAIN'] = b'1'
--- a/kallithea/lib/vcs/utils/imports.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/imports.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,3 @@
-from kallithea.lib.vcs.exceptions import VCSError
-
-
 def import_class(class_path):
     """
     Returns class from the given path.
@@ -8,10 +5,7 @@
     For example, in order to get class located at
     ``vcs.backends.hg.MercurialRepository``:
 
-        try:
-            hgrepo = import_class('vcs.backends.hg.MercurialRepository')
-        except VCSError:
-            # handle error
+        hgrepo = import_class('vcs.backends.hg.MercurialRepository')
     """
     splitted = class_path.split('.')
     mod_path = '.'.join(splitted[:-1])
--- a/kallithea/lib/vcs/utils/lazy.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/lazy.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,3 @@
-import threading
-
-
 class _Missing(object):
 
     def __repr__(self):
@@ -44,21 +41,3 @@
             value = self._func(obj)
             obj.__dict__[self.__name__] = value
         return value
-
-
-class ThreadLocalLazyProperty(LazyProperty):
-    """
-    Same as above but uses thread local dict for cache storage.
-    """
-
-    def __get__(self, obj, klass=None):
-        if obj is None:
-            return self
-        if not hasattr(obj, '__tl_dict__'):
-            obj.__tl_dict__ = threading.local().__dict__
-
-        value = obj.__tl_dict__.get(self.__name__, _missing)
-        if value is _missing:
-            value = self._func(obj)
-            obj.__tl_dict__[self.__name__] = value
-        return value
--- a/kallithea/lib/vcs/utils/paths.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/paths.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,8 @@
 import os
 
 
-abspath = lambda * p: os.path.abspath(os.path.join(*p))
+def abspath(*p):
+    return os.path.abspath(os.path.join(*p))
 
 
 def get_dirs_for_path(*paths):
@@ -11,7 +12,7 @@
     for path in paths:
         head = path
         while head:
-            head, tail = os.path.split(head)
+            head, _tail = os.path.split(head)
             if head:
                 yield head
             else:
--- a/kallithea/lib/vcs/utils/progressbar.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/progressbar.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,5 @@
 # encoding: UTF-8
 
-from __future__ import print_function
-
 import datetime
 import string
 import sys
@@ -43,7 +41,7 @@
     def __iter__(self):
         start = self.step
         end = self.steps + 1
-        for x in xrange(start, end):
+        for x in range(start, end):
             self.render(x)
             yield x
 
@@ -215,7 +213,7 @@
     code_list = []
     if text == '' and len(opts) == 1 and opts[0] == 'reset':
         return '\x1b[%sm' % RESET
-    for k, v in kwargs.iteritems():
+    for k, v in kwargs.items():
         if k == 'fg':
             code_list.append(foreground[v])
         elif k == 'bg':
@@ -359,7 +357,7 @@
 
     print("Standard progress bar...")
     bar = ProgressBar(30)
-    for x in xrange(1, 31):
+    for x in range(1, 31):
         bar.render(x)
         time.sleep(0.02)
     bar.stream.write('\n')
@@ -410,7 +408,7 @@
     bar.width = 50
     bar.elements.remove('steps')
     bar.elements += ['transfer', 'time', 'eta', 'speed']
-    for x in xrange(0, bar.steps, 1024):
+    for x in range(0, bar.steps, 1024):
         bar.render(x)
         time.sleep(0.01)
         now = datetime.datetime.now()
--- a/kallithea/lib/vcs/utils/termcolors.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/lib/vcs/utils/termcolors.py	Sat May 02 21:20:43 2020 +0200
@@ -44,7 +44,7 @@
     code_list = []
     if text == '' and len(opts) == 1 and opts[0] == 'reset':
         return '\x1b[%sm' % RESET
-    for k, v in kwargs.iteritems():
+    for k, v in kwargs.items():
         if k == 'fg':
             code_list.append(foreground[v])
         elif k == 'bg':
@@ -188,7 +188,7 @@
                 definition['bg'] = colors[-1]
 
             # All remaining instructions are options
-            opts = tuple(s for s in styles if s in opt_dict.keys())
+            opts = tuple(s for s in styles if s in opt_dict)
             if opts:
                 definition['opts'] = opts
 
--- a/kallithea/model/comment.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/comment.py	Sat May 02 21:20:43 2020 +0200
@@ -31,7 +31,7 @@
 from tg.i18n import ugettext as _
 
 from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import extract_mentioned_users, safe_unicode
+from kallithea.lib.utils2 import extract_mentioned_users
 from kallithea.model.db import ChangesetComment, PullRequest, Repository, User
 from kallithea.model.meta import Session
 from kallithea.model.notification import NotificationModel
@@ -81,11 +81,10 @@
                 repo_name=repo.repo_name,
                 revision=revision,
                 anchor='comment-%s' % comment.comment_id)
-            subj = safe_unicode(
-                h.link_to('Re changeset: %(desc)s %(line)s' %
+            subj = h.link_to(
+                'Re changeset: %(desc)s %(line)s' %
                           {'desc': desc, 'line': line},
-                          comment_url)
-            )
+                 comment_url)
             # get the current participants of this changeset
             recipients = _list_changeset_commenters(revision)
             # add changeset author if it's known locally
@@ -127,13 +126,12 @@
                                                           h.canonical_hostname()))
             comment_url = pull_request.url(canonical=True,
                 anchor='comment-%s' % comment.comment_id)
-            subj = safe_unicode(
-                h.link_to('Re pull request %(pr_nice_id)s: %(desc)s %(line)s' %
+            subj = h.link_to(
+                'Re pull request %(pr_nice_id)s: %(desc)s %(line)s' %
                           {'desc': desc,
                            'pr_nice_id': comment.pull_request.nice_id(),
                            'line': line},
-                          comment_url)
-            )
+                comment_url)
             # get the current participants of this pull request
             recipients = _list_pull_request_commenters(pull_request)
             recipients.append(pull_request.owner)
@@ -257,7 +255,7 @@
         paths = defaultdict(lambda: defaultdict(list))
         for co in comments:
             paths[co.f_path][co.line_no].append(co)
-        return paths.items()
+        return sorted(paths.items())
 
     def _get_comments(self, repo_id, revision=None, pull_request=None,
                 inline=False, f_path=None, line_no=None):
--- a/kallithea/model/db.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/db.py	Sat May 02 21:20:43 2020 +0200
@@ -25,6 +25,7 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
+import base64
 import collections
 import datetime
 import functools
@@ -36,22 +37,20 @@
 
 import ipaddr
 import sqlalchemy
-from beaker.cache import cache_region, region_invalidate
-from sqlalchemy import *
+from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
 from tg.i18n import lazy_ugettext as _
 from webob.exc import HTTPNotFound
 
 import kallithea
-from kallithea.lib.caching_query import FromCache
-from kallithea.lib.compat import json
+from kallithea.lib import ext_json
 from kallithea.lib.exceptions import DefaultUserException
-from kallithea.lib.utils2 import Optional, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_int, safe_str, safe_unicode, str2bool, urlreadable
+from kallithea.lib.utils2 import (Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool,
+                                  urlreadable)
 from kallithea.lib.vcs import get_backend
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.model.meta import Base, Session
 
 
@@ -62,7 +61,8 @@
 # BASE CLASSES
 #==============================================================================
 
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
+def _hash_key(k):
+    return hashlib.md5(safe_bytes(k)).hexdigest()
 
 
 class BaseDbModel(object):
@@ -73,6 +73,7 @@
     @classmethod
     def _get_keys(cls):
         """return column names for this model """
+        # Note: not a normal dict - iterator gives "users.firstname", but keys gives "firstname"
         return class_mapper(cls).c.keys()
 
     def get_dict(self):
@@ -90,7 +91,7 @@
             # update with attributes from __json__
             if callable(_json_attr):
                 _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
+            for k, val in _json_attr.items():
                 d[k] = val
         return d
 
@@ -135,8 +136,10 @@
             return None
         if isinstance(value, cls):
             return value
-        if isinstance(value, (int, long)) or safe_str(value).isdigit():
+        if isinstance(value, int):
             return cls.get(value)
+        if isinstance(value, str) and value.isdigit():
+            return cls.get(int(value))
         if callback is not None:
             return callback(value)
 
@@ -163,12 +166,6 @@
         Session().delete(obj)
 
     def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
         return '<DB:%s>' % (self.__class__.__name__)
 
 
@@ -185,9 +182,9 @@
     )
 
     SETTINGS_TYPES = {
-        'str': safe_str,
+        'str': safe_bytes,
         'int': safe_int,
-        'unicode': safe_unicode,
+        'unicode': safe_str,
         'bool': str2bool,
         'list': functools.partial(aslist, sep=',')
     }
@@ -205,7 +202,7 @@
 
     @validates('_app_settings_value')
     def validate_settings_value(self, key, val):
-        assert type(val) == unicode
+        assert isinstance(val, str)
         return val
 
     @hybrid_property
@@ -218,11 +215,9 @@
     @app_settings_value.setter
     def app_settings_value(self, val):
         """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
+        Setter that will always make sure we use str in app_settings_value
         """
-        self._app_settings_value = safe_unicode(val)
+        self._app_settings_value = safe_str(val)
 
     @hybrid_property
     def app_settings_type(self):
@@ -232,13 +227,13 @@
     def app_settings_type(self, val):
         if val not in self.SETTINGS_TYPES:
             raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
+                            % (list(self.SETTINGS_TYPES), val))
         self._app_settings_type = val
 
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
+    def __repr__(self):
+        return "<%s %s.%s=%r>" % (
             self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
+            self.app_settings_name, self.app_settings_type, self.app_settings_value
         )
 
     @classmethod
@@ -281,13 +276,9 @@
         return res
 
     @classmethod
-    def get_app_settings(cls, cache=False):
+    def get_app_settings(cls):
 
         ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
         if ret is None:
             raise Exception('Could not get application settings !')
         settings = {}
@@ -298,7 +289,7 @@
         return settings
 
     @classmethod
-    def get_auth_settings(cls, cache=False):
+    def get_auth_settings(cls):
         ret = cls.query() \
                 .filter(cls.app_settings_name.startswith('auth_')).all()
         fd = {}
@@ -307,7 +298,7 @@
         return fd
 
     @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
+    def get_default_repo_settings(cls, strip_prefix=False):
         ret = cls.query() \
                 .filter(cls.app_settings_name.startswith('default_')).all()
         fd = {}
@@ -328,9 +319,9 @@
         info = {
             'modules': sorted(mods, key=lambda k: k[0].lower()),
             'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
+            'platform': platform.platform(),
             'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
+            'git_version': str(check_git_version()),
             'git_path': kallithea.CONFIG.get('git_path')
         }
         return info
@@ -339,9 +330,7 @@
 class Ui(Base, BaseDbModel):
     __tablename__ = 'ui'
     __table_args__ = (
-        # FIXME: ui_key as key is wrong and should be removed when the corresponding
-        # Ui.get_by_key has been replaced by the composite key
-        UniqueConstraint('ui_key'),
+        Index('ui_ui_section_ui_key_idx', 'ui_section', 'ui_key'),
         UniqueConstraint('ui_section', 'ui_key'),
         _table_args_default_dict,
     )
@@ -374,6 +363,7 @@
         q = cls.query()
         q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
         q = q.filter(cls.ui_section == 'hooks')
+        q = q.order_by(cls.ui_section, cls.ui_key)
         return q.all()
 
     @classmethod
@@ -381,6 +371,7 @@
         q = cls.query()
         q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
         q = q.filter(cls.ui_section == 'hooks')
+        q = q.order_by(cls.ui_section, cls.ui_key)
         return q.all()
 
     @classmethod
@@ -394,8 +385,9 @@
         new_ui.ui_value = val
 
     def __repr__(self):
-        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
-                                    self.ui_key, self.ui_value)
+        return '<%s %s.%s=%r>' % (
+            self.__class__.__name__,
+            self.ui_section, self.ui_key, self.ui_value)
 
 
 class User(Base, BaseDbModel):
@@ -406,7 +398,7 @@
         _table_args_default_dict,
     )
 
-    DEFAULT_USER = 'default'
+    DEFAULT_USER_NAME = 'default'
     DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
     # The name of the default auth type in extern_type, 'internal' lives in auth_internal.py
     DEFAULT_AUTH_TYPE = 'internal'
@@ -512,7 +504,7 @@
 
     @hybrid_property
     def is_default_user(self):
-        return self.username == User.DEFAULT_USER
+        return self.username == User.DEFAULT_USER_NAME
 
     @hybrid_property
     def user_data(self):
@@ -520,20 +512,19 @@
             return {}
 
         try:
-            return json.loads(self._user_data)
+            return ext_json.loads(self._user_data)
         except TypeError:
             return {}
 
     @user_data.setter
     def user_data(self, val):
         try:
-            self._user_data = json.dumps(val)
+            self._user_data = ascii_bytes(ext_json.dumps(val))
         except Exception:
             log.error(traceback.format_exc())
 
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
+    def __repr__(self):
+        return "<%s %s: %r>" % (self.__class__.__name__, self.user_id, self.username)
 
     @classmethod
     def guess_instance(cls, value):
@@ -551,7 +542,7 @@
         return user
 
     @classmethod
-    def get_by_username_or_email(cls, username_or_email, case_insensitive=False, cache=False):
+    def get_by_username_or_email(cls, username_or_email, case_insensitive=True):
         """
         For anything that looks like an email address, look up by the email address (matching
         case insensitively).
@@ -560,35 +551,24 @@
         This assumes no normal username can have '@' symbol.
         """
         if '@' in username_or_email:
-            return User.get_by_email(username_or_email, cache=cache)
+            return User.get_by_email(username_or_email)
         else:
-            return User.get_by_username(username_or_email, case_insensitive=case_insensitive, cache=cache)
+            return User.get_by_username(username_or_email, case_insensitive=case_insensitive)
 
     @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
+    def get_by_username(cls, username, case_insensitive=False):
         if case_insensitive:
-            q = cls.query().filter(func.lower(cls.username) == func.lower(username))
+            q = cls.query().filter(sqlalchemy.func.lower(cls.username) == sqlalchemy.func.lower(username))
         else:
             q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
         return q.scalar()
 
     @classmethod
-    def get_by_api_key(cls, api_key, cache=False, fallback=True):
+    def get_by_api_key(cls, api_key, fallback=True):
         if len(api_key) != 40 or not api_key.isalnum():
             return None
 
         q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
         res = q.scalar()
 
         if fallback and not res:
@@ -602,21 +582,13 @@
 
     @classmethod
     def get_by_email(cls, email, cache=False):
-        q = cls.query().filter(func.lower(cls.email) == func.lower(email))
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
+        q = cls.query().filter(sqlalchemy.func.lower(cls.email) == sqlalchemy.func.lower(email))
         ret = q.scalar()
         if ret is None:
             q = UserEmailMap.query()
             # try fetching in alternate email map
-            q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
+            q = q.filter(sqlalchemy.func.lower(UserEmailMap.email) == sqlalchemy.func.lower(email))
             q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
             ret = getattr(q.scalar(), 'user', None)
 
         return ret
@@ -654,8 +626,8 @@
         return user
 
     @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
+    def get_default_user(cls):
+        user = User.get_by_username(User.DEFAULT_USER_NAME)
         if user is None:
             raise Exception('Missing default account!')
         return user
@@ -772,9 +744,8 @@
           ip_range=self._get_ip_range(self.ip_addr)
         )
 
-    def __unicode__(self):
-        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
-                                            self.user_id, self.ip_addr)
+    def __repr__(self):
+        return "<%s %s: %s>" % (self.__class__.__name__, self.user_id, self.ip_addr)
 
 
 class UserLog(Base, BaseDbModel):
@@ -792,10 +763,10 @@
     action = Column(UnicodeText(), nullable=False)
     action_date = Column(DateTime(timezone=False), nullable=False)
 
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
+    def __repr__(self):
+        return "<%s %r: %r>" % (self.__class__.__name__,
+                                  self.repository_name,
+                                  self.action)
 
     @property
     def action_as_day(self):
@@ -834,47 +805,37 @@
             return {}
 
         try:
-            return json.loads(self._group_data)
+            return ext_json.loads(self._group_data)
         except TypeError:
             return {}
 
     @group_data.setter
     def group_data(self, val):
         try:
-            self._group_data = json.dumps(val)
+            self._group_data = ascii_bytes(ext_json.dumps(val))
         except Exception:
             log.error(traceback.format_exc())
 
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
+    def __repr__(self):
+        return "<%s %s: %r>" % (self.__class__.__name__,
+                                  self.users_group_id,
+                                  self.users_group_name)
 
     @classmethod
     def guess_instance(cls, value):
         return super(UserGroup, cls).guess_instance(value, UserGroup.get_by_group_name)
 
     @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
+    def get_by_group_name(cls, group_name, case_insensitive=False):
         if case_insensitive:
-            q = cls.query().filter(func.lower(cls.users_group_name) == func.lower(group_name))
+            q = cls.query().filter(sqlalchemy.func.lower(cls.users_group_name) == sqlalchemy.func.lower(group_name))
         else:
             q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                          )
-            )
         return q.scalar()
 
     @classmethod
-    def get(cls, user_group_id, cache=False):
+    def get(cls, user_group_id):
         user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
         return user_group.get(user_group_id)
 
     def get_api_data(self, with_members=True):
@@ -962,9 +923,9 @@
     DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
     DEFAULT_CLONE_SSH = 'ssh://{system_user}@{hostname}/{repo}'
 
-    STATE_CREATED = u'repo_state_created'
-    STATE_PENDING = u'repo_state_pending'
-    STATE_ERROR = u'repo_state_error'
+    STATE_CREATED = 'repo_state_created'
+    STATE_PENDING = 'repo_state_pending'
+    STATE_ERROR = 'repo_state_error'
 
     repo_id = Column(Integer(), primary_key=True)
     repo_name = Column(Unicode(255), nullable=False, unique=True)
@@ -1009,9 +970,9 @@
                     primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
                     cascade="all, delete-orphan")
 
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
+    def __repr__(self):
+        return "<%s %s: %r>" % (self.__class__.__name__,
+                                  self.repo_id, self.repo_name)
 
     @hybrid_property
     def landing_rev(self):
@@ -1033,7 +994,7 @@
     @hybrid_property
     def changeset_cache(self):
         try:
-            cs_cache = json.loads(self._changeset_cache) # might raise on bad data
+            cs_cache = ext_json.loads(self._changeset_cache) # might raise on bad data
             cs_cache['raw_id'] # verify data, raise exception on error
             return cs_cache
         except (TypeError, KeyError, ValueError):
@@ -1042,7 +1003,7 @@
     @changeset_cache.setter
     def changeset_cache(self, val):
         try:
-            self._changeset_cache = json.dumps(val)
+            self._changeset_cache = ascii_bytes(ext_json.dumps(val))
         except Exception:
             log.error(traceback.format_exc())
 
@@ -1055,15 +1016,11 @@
         q = super(Repository, cls).query()
 
         if sorted:
-            q = q.order_by(func.lower(Repository.repo_name))
+            q = q.order_by(sqlalchemy.func.lower(Repository.repo_name))
 
         return q
 
     @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
     def normalize_repo_name(cls, repo_name):
         """
         Normalizes os specific repo_name to the format internally stored inside
@@ -1072,7 +1029,7 @@
         :param cls:
         :param repo_name:
         """
-        return cls.url_sep().join(repo_name.split(os.sep))
+        return URL_SEP.join(repo_name.split(os.sep))
 
     @classmethod
     def guess_instance(cls, value):
@@ -1083,7 +1040,7 @@
         """Get the repo, defaulting to database case sensitivity.
         case_insensitive will be slower and should only be specified if necessary."""
         if case_insensitive:
-            q = Session().query(cls).filter(func.lower(cls.repo_name) == func.lower(repo_name))
+            q = Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name))
         else:
             q = Session().query(cls).filter(cls.repo_name == repo_name)
         q = q.options(joinedload(Repository.fork)) \
@@ -1093,7 +1050,7 @@
 
     @classmethod
     def get_by_full_path(cls, repo_full_path):
-        base_full_path = os.path.realpath(cls.base_path())
+        base_full_path = os.path.realpath(kallithea.CONFIG['base_path'])
         repo_full_path = os.path.realpath(repo_full_path)
         assert repo_full_path.startswith(base_full_path + os.path.sep)
         repo_name = repo_full_path[len(base_full_path) + 1:]
@@ -1104,18 +1061,6 @@
     def get_repo_forks(cls, repo_id):
         return cls.query().filter(Repository.fork_id == repo_id)
 
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
     @property
     def forks(self):
         """
@@ -1132,7 +1077,7 @@
 
     @property
     def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
+        return self.repo_name.split(URL_SEP)[-1]
 
     @property
     def groups_with_parents(self):
@@ -1145,35 +1090,18 @@
         groups.reverse()
         return groups
 
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns base full path for that repository means where it actually
-        exists on a filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
     @property
     def repo_full_path(self):
-        p = [self.repo_path]
+        """
+        Returns base full path for the repository - where it actually
+        exists on a filesystem.
+        """
+        p = [kallithea.CONFIG['base_path']]
         # we need to split the name by / since this is how we store the
         # names in the database, but that eventually needs to be converted
         # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
+        p += self.repo_name.split(URL_SEP)
+        return os.path.join(*p)
 
     def get_new_name(self, repo_name):
         """
@@ -1182,7 +1110,7 @@
         :param group_name:
         """
         path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
+        return URL_SEP.join(path_prefix + [repo_name])
 
     @property
     def _ui(self):
@@ -1190,7 +1118,7 @@
         Creates an db based ui object for this repository
         """
         from kallithea.lib.utils import make_ui
-        return make_ui(clear_session=False)
+        return make_ui()
 
     @classmethod
     def is_valid(cls, repo_name):
@@ -1202,7 +1130,7 @@
         """
         from kallithea.lib.utils import is_valid_repo
 
-        return is_valid_repo(repo_name, cls.base_path())
+        return is_valid_repo(repo_name, kallithea.CONFIG['base_path'])
 
     def get_api_data(self, with_revision_names=False,
                            with_pullrequests=False):
@@ -1397,47 +1325,34 @@
 
     def set_invalidate(self):
         """
-        Mark caches of this repo as invalid.
+        Flush SA session caches of instances of on disk repo.
         """
-        CacheInvalidation.set_invalidate(self.repo_name)
+        try:
+            del self._scm_instance
+        except AttributeError:
+            pass
 
-    _scm_instance = None
+    _scm_instance = None  # caching inside lifetime of SA session
 
     @property
     def scm_instance(self):
         if self._scm_instance is None:
-            self._scm_instance = self.scm_instance_cached()
+            return self.scm_instance_no_cache()  # will populate self._scm_instance
         return self._scm_instance
 
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term', 'scm_instance_cached')
-        def _c(repo_name): # repo_name is just for the cache key
-            log.debug('Creating new %s scm_instance and populating cache', repo_name)
-            return self.scm_instance_no_cache()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, 'scm_instance_cached', rn)
-        else:
-            log.debug('Trying to get scm_instance of %s from cache', rn)
-        return _c(rn)
-
     def scm_instance_no_cache(self):
-        repo_full_path = safe_str(self.repo_full_path)
+        repo_full_path = self.repo_full_path
         alias = get_scm(repo_full_path)[0]
         log.debug('Creating instance of %s repository from %s',
                   alias, self.repo_full_path)
         backend = get_backend(alias)
 
         if alias == 'hg':
-            repo = backend(repo_full_path, create=False,
-                           baseui=self._ui)
+            self._scm_instance = backend(repo_full_path, create=False, baseui=self._ui)
         else:
-            repo = backend(repo_full_path, create=False)
+            self._scm_instance = backend(repo_full_path, create=False)
 
-        return repo
+        return self._scm_instance
 
     def __json__(self):
         return dict(
@@ -1476,7 +1391,7 @@
         q = super(RepoGroup, cls).query()
 
         if sorted:
-            q = q.order_by(func.lower(RepoGroup.group_name))
+            q = q.order_by(sqlalchemy.func.lower(RepoGroup.group_name))
 
         return q
 
@@ -1484,16 +1399,16 @@
         self.group_name = group_name
         self.parent_group = parent_group
 
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
+    def __repr__(self):
+        return "<%s %s: %s>" % (self.__class__.__name__,
+                                self.group_id, self.group_name)
 
     @classmethod
     def _generate_choice(cls, repo_group):
         """Return tuple with group_id and name as html literal"""
         from webhelpers2.html import literal
         if repo_group is None:
-            return (-1, u'-- %s --' % _('top level'))
+            return (-1, '-- %s --' % _('top level'))
         return repo_group.group_id, literal(cls.SEP.join(repo_group.full_path_splitted))
 
     @classmethod
@@ -1503,28 +1418,18 @@
                       key=lambda c: c[1].split(cls.SEP))
 
     @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
     def guess_instance(cls, value):
         return super(RepoGroup, cls).guess_instance(value, RepoGroup.get_by_group_name)
 
     @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
+    def get_by_group_name(cls, group_name, case_insensitive=False):
         group_name = group_name.rstrip('/')
         if case_insensitive:
             gr = cls.query() \
-                .filter(func.lower(cls.group_name) == func.lower(group_name))
+                .filter(sqlalchemy.func.lower(cls.group_name) == sqlalchemy.func.lower(group_name))
         else:
             gr = cls.query() \
                 .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
         return gr.scalar()
 
     @property
@@ -1544,7 +1449,7 @@
 
     @property
     def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
+        return self.group_name.split(URL_SEP)[-1]
 
     @property
     def full_path(self):
@@ -1552,7 +1457,7 @@
 
     @property
     def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
+        return self.group_name.split(URL_SEP)
 
     @property
     def repositories(self):
@@ -1607,7 +1512,7 @@
         """
         path_prefix = (self.parent_group.full_path_splitted if
                        self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
+        return URL_SEP.join(path_prefix + [group_name])
 
     def get_api_data(self):
         """
@@ -1731,8 +1636,8 @@
     permission_id = Column(Integer(), primary_key=True)
     permission_name = Column(String(255), nullable=False)
 
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
+    def __repr__(self):
+        return "<%s %s: %r>" % (
             self.__class__.__name__, self.permission_id, self.permission_name
         )
 
@@ -1746,27 +1651,27 @@
 
     @classmethod
     def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
+        q = Session().query(UserRepoToPerm) \
+         .options(joinedload(UserRepoToPerm.repository)) \
+         .options(joinedload(UserRepoToPerm.permission)) \
          .filter(UserRepoToPerm.user_id == default_user_id)
 
         return q.all()
 
     @classmethod
     def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
+        q = Session().query(UserRepoGroupToPerm) \
+         .options(joinedload(UserRepoGroupToPerm.group)) \
+         .options(joinedload(UserRepoGroupToPerm.permission)) \
          .filter(UserRepoGroupToPerm.user_id == default_user_id)
 
         return q.all()
 
     @classmethod
     def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
+        q = Session().query(UserUserGroupToPerm) \
+         .options(joinedload(UserUserGroupToPerm.user_group)) \
+         .options(joinedload(UserUserGroupToPerm.permission)) \
          .filter(UserUserGroupToPerm.user_id == default_user_id)
 
         return q.all()
@@ -1797,8 +1702,9 @@
         Session().add(n)
         return n
 
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
+    def __repr__(self):
+        return '<%s %s at %s: %s>' % (
+            self.__class__.__name__, self.user, self.repository, self.permission)
 
 
 class UserUserGroupToPerm(Base, BaseDbModel):
@@ -1826,8 +1732,9 @@
         Session().add(n)
         return n
 
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
+    def __repr__(self):
+        return '<%s %s at %s: %s>' % (
+            self.__class__.__name__, self.user, self.user_group, self.permission)
 
 
 class UserToPerm(Base, BaseDbModel):
@@ -1844,8 +1751,9 @@
     user = relationship('User')
     permission = relationship('Permission')
 
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
+    def __repr__(self):
+        return '<%s %s: %s>' % (
+            self.__class__.__name__, self.user, self.permission)
 
 
 class UserGroupRepoToPerm(Base, BaseDbModel):
@@ -1873,8 +1781,9 @@
         Session().add(n)
         return n
 
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
+    def __repr__(self):
+        return '<%s %s at %s: %s>' % (
+            self.__class__.__name__, self.users_group, self.repository, self.permission)
 
 
 class UserGroupUserGroupToPerm(Base, BaseDbModel):
@@ -1902,8 +1811,9 @@
         Session().add(n)
         return n
 
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
+    def __repr__(self):
+        return '<%s %s at %s: %s>' % (
+            self.__class__.__name__, self.user_group, self.target_user_group, self.permission)
 
 
 class UserGroupToPerm(Base, BaseDbModel):
@@ -2006,137 +1916,13 @@
     user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 
     follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by=lambda: func.lower(Repository.repo_name))
+    follows_repository = relationship('Repository', order_by=lambda: sqlalchemy.func.lower(Repository.repo_name))
 
     @classmethod
     def get_repo_followers(cls, repo_id):
         return cls.query().filter(cls.follows_repository_id == repo_id)
 
 
-class CacheInvalidation(Base, BaseDbModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        Index('key_idx', 'cache_key'),
-        _table_args_default_dict,
-    )
-
-    # cache_id, not used
-    cache_id = Column(Integer(), primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column(Unicode(255), nullable=False, unique=True)
-    # cache_args is a repo_name
-    cache_args = Column(Unicode(255), nullable=False)
-    # instance sets cache_active True when it is caching, other instances set
-    # cache_active to False to indicate that this cache is invalid
-    cache_active = Column(Boolean(), nullable=False, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        key must / will start with a repo_name which will be stored in .cache_args .
-        """
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-        log.debug('for repo %s got %s invalidation objects',
-                  safe_str(repo_name), inv_objs)
-
-        for inv_obj in inv_objs:
-            log.debug('marking %s key for invalidation based on repo_name=%s',
-                      inv_obj, safe_str(repo_name))
-            Session().delete(inv_obj)
-        Session().commit()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-        if inv_obj is None:
-            inv_obj = cls(cache_key, repo_name)
-            Session().add(inv_obj)
-        elif inv_obj.cache_active:
-            return True
-        inv_obj.cache_active = True
-        try:
-            Session().commit()
-        except sqlalchemy.exc.IntegrityError:
-            log.error('commit of CacheInvalidation failed - retrying')
-            Session().rollback()
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if inv_obj is None:
-                log.error('failed to create CacheInvalidation entry')
-                # TODO: fail badly?
-            # else: TOCTOU - another thread added the key at the same time; no further action required
-        return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return opaque object with information of which caches still are valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
-
 class ChangesetComment(Base, BaseDbModel):
     __tablename__ = 'changeset_comments'
     __table_args__ = (
@@ -2225,8 +2011,8 @@
     comment = relationship('ChangesetComment')
     pull_request = relationship('PullRequest')
 
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
+    def __repr__(self):
+        return "<%s %r by %r>" % (
             self.__class__.__name__,
             self.status, self.author
         )
@@ -2256,8 +2042,8 @@
     )
 
     # values for .status
-    STATUS_NEW = u'new'
-    STATUS_CLOSED = u'closed'
+    STATUS_NEW = 'new'
+    STATUS_CLOSED = 'closed'
 
     pull_request_id = Column(Integer(), primary_key=True)
     title = Column(Unicode(255), nullable=False)
@@ -2278,7 +2064,7 @@
 
     @revisions.setter
     def revisions(self, val):
-        self._revisions = safe_unicode(':'.join(val))
+        self._revisions = ':'.join(val)
 
     @property
     def org_ref_parts(self):
@@ -2426,9 +2212,9 @@
         _table_args_default_dict,
     )
 
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-    DEFAULT_FILENAME = u'gistfile1.txt'
+    GIST_PUBLIC = 'public'
+    GIST_PRIVATE = 'private'
+    DEFAULT_FILENAME = 'gistfile1.txt'
 
     gist_id = Column(Integer(), primary_key=True)
     gist_access_id = Column(Unicode(250), nullable=False)
@@ -2446,7 +2232,9 @@
         return (self.gist_expires != -1) & (time.time() > self.gist_expires)
 
     def __repr__(self):
-        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
+        return "<%s %s %s>" % (
+            self.__class__.__name__,
+            self.gist_type, self.gist_access_id)
 
     @classmethod
     def guess_instance(cls, value):
@@ -2471,19 +2259,6 @@
         import kallithea.lib.helpers as h
         return h.canonical_url('gist', gist_id=self.gist_access_id)
 
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
     def get_api_data(self):
         """
         Common function for generating gist related data for API
@@ -2505,14 +2280,15 @@
         )
         data.update(self.get_api_data())
         return data
+
     ## SCM functions
 
     @property
     def scm_instance(self):
         from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
+        from kallithea.model.gist import GIST_STORE_LOC
+        gist_base_path = os.path.join(kallithea.CONFIG['base_path'], GIST_STORE_LOC)
+        return get_repo(os.path.join(gist_base_path, self.gist_access_id))
 
 
 class UserSshKeys(Base, BaseDbModel):
@@ -2543,5 +2319,5 @@
         # the full public key is too long to be suitable as database key - instead,
         # use fingerprints similar to 'ssh-keygen -E sha256 -lf ~/.ssh/id_rsa.pub'
         self._public_key = full_key
-        enc_key = full_key.split(" ")[1]
-        self.fingerprint = hashlib.sha256(enc_key.decode('base64')).digest().encode('base64').replace('\n', '').rstrip('=')
+        enc_key = safe_bytes(full_key.split(" ")[1])
+        self.fingerprint = base64.b64encode(hashlib.sha256(base64.b64decode(enc_key)).digest()).replace(b'\n', b'').rstrip(b'=').decode()
--- a/kallithea/model/forms.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/forms.py	Sat May 02 21:20:43 2020 +0200
@@ -238,7 +238,7 @@
     return _PasswordResetConfirmationForm
 
 
-def RepoForm(edit=False, old_data=None, supported_backends=BACKENDS.keys(),
+def RepoForm(edit=False, old_data=None, supported_backends=BACKENDS,
              repo_groups=None, landing_revs=None):
     old_data = old_data or {}
     repo_groups = repo_groups or []
@@ -315,7 +315,7 @@
     return _RepoFieldForm
 
 
-def RepoForkForm(edit=False, old_data=None, supported_backends=BACKENDS.keys(),
+def RepoForkForm(edit=False, old_data=None, supported_backends=BACKENDS,
                  repo_groups=None, landing_revs=None):
     old_data = old_data or {}
     repo_groups = repo_groups or []
@@ -435,7 +435,7 @@
     return _CustomDefaultPermissionsForm
 
 
-def DefaultsForm(edit=False, old_data=None, supported_backends=BACKENDS.keys()):
+def DefaultsForm(edit=False, old_data=None, supported_backends=BACKENDS):
     class _DefaultsForm(formencode.Schema):
         allow_extra_fields = True
         filter_extra_fields = True
@@ -558,11 +558,11 @@
 
         filename = All(v.BasePath()(),
                        v.UnicodeString(strip=True, required=False))
-        description = v.UnicodeString(required=False, if_missing=u'')
+        description = v.UnicodeString(required=False, if_missing='')
         lifetime = v.OneOf(lifetime_options)
         mimetype = v.UnicodeString(required=False, if_missing=None)
         content = v.UnicodeString(required=True, not_empty=True)
-        public = v.UnicodeString(required=False, if_missing=u'')
-        private = v.UnicodeString(required=False, if_missing=u'')
+        public = v.UnicodeString(required=False, if_missing='')
+        private = v.UnicodeString(required=False, if_missing='')
 
     return _GistForm
--- a/kallithea/model/gist.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/gist.py	Sat May 02 21:20:43 2020 +0200
@@ -32,8 +32,8 @@
 import time
 import traceback
 
-from kallithea.lib.compat import json
-from kallithea.lib.utils2 import AttributeDict, safe_int, safe_unicode, time_to_datetime
+from kallithea.lib import ext_json
+from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime
 from kallithea.model.db import Gist, Session, User
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
@@ -45,12 +45,12 @@
 GIST_METADATA_FILE = '.rc_gist_metadata'
 
 
-def make_gist_id():
+def make_gist_access_id():
     """Generate a random, URL safe, almost certainly unique gist identifier."""
     rnd = random.SystemRandom() # use cryptographically secure system PRNG
     alphabet = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz'
     length = 20
-    return u''.join(rnd.choice(alphabet) for _ in xrange(length))
+    return ''.join(rnd.choice(alphabet) for _ in range(length))
 
 
 class GistModel(object):
@@ -82,7 +82,7 @@
             'gist_updated': time.time(),
         }
         with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
-            f.write(json.dumps(metadata))
+            f.write(ascii_bytes(ext_json.dumps(metadata)))
 
     def get_gist(self, gist):
         return Gist.guess_instance(gist)
@@ -108,7 +108,7 @@
         :param lifetime: in minutes, -1 == forever
         """
         owner = User.guess_instance(owner)
-        gist_id = make_gist_id()
+        gist_access_id = make_gist_access_id()
         lifetime = safe_int(lifetime, -1)
         gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
         log.debug('set GIST expiration date to: %s',
@@ -117,21 +117,19 @@
         # create the Database version
         gist = Gist()
         gist.gist_description = description
-        gist.gist_access_id = gist_id
+        gist.gist_access_id = gist_access_id
         gist.owner_id = owner.user_id
         gist.gist_expires = gist_expires
-        gist.gist_type = safe_unicode(gist_type)
+        gist.gist_type = gist_type
         Session().add(gist)
         Session().flush() # make database assign gist.gist_id
         if gist_type == Gist.GIST_PUBLIC:
             # use DB ID for easy to use GIST ID
-            gist_id = safe_unicode(gist.gist_id)
-            gist.gist_access_id = gist_id
+            gist.gist_access_id = str(gist.gist_id)
 
-        gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
-        log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
+        log.debug('Creating new %s GIST repo %s', gist_type, gist.gist_access_id)
         repo = RepoModel()._create_filesystem_repo(
-            repo_name=gist_id, repo_type='hg', repo_group=GIST_STORE_LOC)
+            repo_name=gist.gist_access_id, repo_type='hg', repo_group=GIST_STORE_LOC)
 
         processed_mapping = {}
         for filename in gist_mapping:
@@ -155,7 +153,7 @@
 
         # fake Kallithea Repository object
         fake_repo = AttributeDict(dict(
-            repo_name=gist_repo_path,
+            repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
             scm_instance_no_cache=lambda: repo,
         ))
         ScmModel().create_nodes(
@@ -219,7 +217,7 @@
 
         # fake Kallithea Repository object
         fake_repo = AttributeDict(dict(
-            repo_name=gist_repo.path,
+            repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
             scm_instance_no_cache=lambda: gist_repo,
         ))
 
--- a/kallithea/model/meta.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/meta.py	Sat May 02 21:20:43 2020 +0200
@@ -18,8 +18,6 @@
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
 
-from kallithea.lib import caching_query
-
 
 # Beaker CacheManager.  A home base for cache configurations.
 cache_manager = cache.CacheManager()
@@ -29,18 +27,13 @@
 #
 # SQLAlchemy session manager.
 #
-session_factory = sessionmaker(
-    query_cls=caching_query.query_callable(cache_manager),
-    expire_on_commit=True)
+session_factory = sessionmaker(expire_on_commit=True)
 Session = scoped_session(session_factory)
 
 # The base class for declarative schemas in db.py
 # Engine is injected when model.__init__.init_model() sets meta.Base.metadata.bind
 Base = declarative_base()
 
-# to use cache use this in query:
-#   .options(FromCache("sqlalchemy_cache_type", "cachekey"))
-
 
 # Define naming conventions for foreign keys, primary keys, indexes,
 # check constraints, and unique constraints, respectively.
--- a/kallithea/model/notification.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/notification.py	Sat May 02 21:20:43 2020 +0200
@@ -33,9 +33,7 @@
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 
-import kallithea
 from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import safe_unicode
 from kallithea.model.db import User
 
 
@@ -44,12 +42,12 @@
 
 class NotificationModel(object):
 
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention' # not used
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
+    TYPE_CHANGESET_COMMENT = 'cs_comment'
+    TYPE_MESSAGE = 'message'
+    TYPE_MENTION = 'mention' # not used
+    TYPE_REGISTRATION = 'registration'
+    TYPE_PULL_REQUEST = 'pull_request'
+    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
 
     def create(self, created_by, subject, body, recipients=None,
                type_=TYPE_MESSAGE, with_email=True,
@@ -134,7 +132,8 @@
         # send email with notification to all other participants
         for rec in rec_objs:
             tasks.send_email([rec.email], email_subject, email_txt_body,
-                     email_html_body, headers, author=created_by_obj)
+                     email_html_body, headers,
+                     from_name=created_by_obj.full_name_or_username)
 
 
 class EmailNotificationModel(object):
@@ -150,7 +149,6 @@
 
     def __init__(self):
         super(EmailNotificationModel, self).__init__()
-        self._template_root = kallithea.CONFIG['paths']['templates'][0]
         self._tmpl_lookup = app_globals.mako_lookup
         self.email_types = {
             self.TYPE_CHANGESET_COMMENT: 'changeset_comment',
@@ -178,14 +176,21 @@
         try:
             subj = tmpl % kwargs
         except KeyError as e:
-            log.error('error generating email subject for %r from %s: %s', type_, ','.join(self._subj_map.keys()), e)
+            log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e)
             raise
-        l = [safe_unicode(x) for x in [kwargs.get('status_change'), kwargs.get('closing_pr') and _('Closing')] if x]
-        if l:
+        # gmail doesn't do proper threading but will ignore leading square
+        # bracket content ... so that is where we put status info
+        bracket_tags = []
+        status_change = kwargs.get('status_change')
+        if status_change:
+            bracket_tags.append(str(status_change))  # apply str to evaluate LazyString before .join
+        if kwargs.get('closing_pr'):
+            bracket_tags.append(_('Closing'))
+        if bracket_tags:
             if subj.startswith('['):
-                subj = '[' + ', '.join(l) + ': ' + subj[1:]
+                subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:]
             else:
-                subj = '[' + ', '.join(l) + '] ' + subj
+                subj = '[' + ', '.join(bracket_tags) + '] ' + subj
         return subj
 
     def get_email_tmpl(self, type_, content_type, **kwargs):
--- a/kallithea/model/permission.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/permission.py	Sat May 02 21:20:43 2020 +0200
@@ -73,10 +73,8 @@
             return '.'.join(perm_name.split('.')[:1])
 
         perms = UserToPerm.query().filter(UserToPerm.user == user).all()
-        defined_perms_groups = map(_get_group,
-                                (x.permission.permission_name for x in perms))
+        defined_perms_groups = set(_get_group(x.permission.permission_name) for x in perms)
         log.debug('GOT ALREADY DEFINED:%s', perms)
-        DEFAULT_PERMS = Permission.DEFAULT_USER_PERMISSIONS
 
         if force:
             for perm in perms:
@@ -85,7 +83,7 @@
             defined_perms_groups = []
         # For every default permission that needs to be created, we check if
         # its group is already defined. If it's not, we create default permission.
-        for perm_name in DEFAULT_PERMS:
+        for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
             gr = _get_group(perm_name)
             if gr not in defined_perms_groups:
                 log.debug('GR:%s not found, creating permission %s',
--- a/kallithea/model/pull_request.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/pull_request.py	Sat May 02 21:20:43 2020 +0200
@@ -33,7 +33,7 @@
 from tg.i18n import ugettext as _
 
 from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import extract_mentioned_users, safe_str, safe_unicode
+from kallithea.lib.utils2 import ascii_bytes, extract_mentioned_users
 from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, User
 from kallithea.model.meta import Session
 from kallithea.model.notification import NotificationModel
@@ -68,14 +68,12 @@
         threading = ['%s-pr-%s@%s' % (pr.other_repo.repo_name,
                                       pr.pull_request_id,
                                       h.canonical_hostname())]
-        subject = safe_unicode(
-            h.link_to(
-              _('%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s') %
+        subject = h.link_to(
+            _('%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s') %
                 {'user': user.username,
                  'pr_title': pr.title,
                  'pr_nice_id': pr.nice_id()},
-                pr_url)
-            )
+            pr_url)
         body = pr.description
         _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':')
         _other_ref_type, other_ref_name, _other_rev = pr.other_ref.split(':')
@@ -261,13 +259,13 @@
 
         if self.org_repo.scm_instance.alias == 'git':
             # create a ref under refs/pull/ so that commits don't get garbage-collected
-            self.org_repo.scm_instance._repo["refs/pull/%d/head" % pr.pull_request_id] = safe_str(self.org_rev)
+            self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev)
 
         # reset state to under-review
         from kallithea.model.changeset_status import ChangesetStatusModel
         from kallithea.model.comment import ChangesetCommentsModel
         comment = ChangesetCommentsModel().create(
-            text=u'',
+            text='',
             repo=self.org_repo,
             author=created_by,
             pull_request=pr,
@@ -362,11 +360,11 @@
             infos.append(_('No changes found on %s %s since previous iteration.') % (org_ref_type, org_ref_name))
             # TODO: fail?
 
-        try:
-            title, old_v = re.match(r'(.*)\(v(\d+)\)\s*$', title).groups()
-            v = int(old_v) + 1
-        except (AttributeError, ValueError):
-            v = 2
+        v = 2
+        m = re.match(r'(.*)\(v(\d+)\)\s*$', title)
+        if m is not None:
+            title = m.group(1)
+            v = int(m.group(2)) + 1
         self.create_action.title = '%s (v%s)' % (title.strip(), v)
 
         # using a mail-like separator, insert new iteration info in description with latest first
--- a/kallithea/model/repo.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/repo.py	Sat May 02 21:20:43 2020 +0200
@@ -35,14 +35,13 @@
 import kallithea.lib.utils2
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel
-from kallithea.lib.caching_query import FromCache
 from kallithea.lib.exceptions import AttachedForksError
 from kallithea.lib.hooks import log_delete_repository
 from kallithea.lib.utils import is_valid_repo_uri, make_ui
-from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix, safe_str, safe_unicode
+from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix
 from kallithea.lib.vcs.backends import get_backend
-from kallithea.model.db import (
-    Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
+from kallithea.model.db import (URL_SEP, Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm,
+                                UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
 
 
 log = logging.getLogger(__name__)
@@ -50,7 +49,7 @@
 
 class RepoModel(object):
 
-    URL_SEPARATOR = Repository.url_sep()
+    URL_SEPARATOR = URL_SEP
 
     def _create_default_perms(self, repository, private):
         # create default permission
@@ -81,25 +80,17 @@
         q = Ui.query().filter(Ui.ui_key == '/').one()
         return q.ui_value
 
-    def get(self, repo_id, cache=False):
+    def get(self, repo_id):
         repo = Repository.query() \
             .filter(Repository.repo_id == repo_id)
-
-        if cache:
-            repo = repo.options(FromCache("sql_cache_short",
-                                          "get_repo_%s" % repo_id))
         return repo.scalar()
 
     def get_repo(self, repository):
         return Repository.guess_instance(repository)
 
-    def get_by_repo_name(self, repo_name, cache=False):
+    def get_by_repo_name(self, repo_name):
         repo = Repository.query() \
             .filter(Repository.repo_name == repo_name)
-
-        if cache:
-            repo = repo.options(FromCache("sql_cache_short",
-                                          "get_repo_%s" % repo_name))
         return repo.scalar()
 
     def get_all_user_repos(self, user):
@@ -109,17 +100,15 @@
         :param user:
         """
         from kallithea.lib.auth import AuthUser
-        user = User.guess_instance(user)
-        repos = AuthUser(dbuser=user).permissions['repositories']
-        access_check = lambda r: r[1] in ['repository.read',
-                                          'repository.write',
-                                          'repository.admin']
-        repos = [x[0] for x in filter(access_check, repos.items())]
+        auth_user = AuthUser(dbuser=User.guess_instance(user))
+        repos = [repo_name
+            for repo_name, perm in auth_user.permissions['repositories'].items()
+            if perm in ['repository.read', 'repository.write', 'repository.admin']
+            ]
         return Repository.query().filter(Repository.repo_name.in_(repos))
 
     @classmethod
     def _render_datatable(cls, tmpl, *args, **kwargs):
-        import kallithea
         from tg import tmpl_context as c, request, app_globals
         from tg.i18n import ugettext as _
 
@@ -128,7 +117,7 @@
 
         tmpl = template.get_def(tmpl)
         kwargs.update(dict(_=_, h=h, c=c, request=request))
-        return tmpl.render(*args, **kwargs)
+        return tmpl.render_unicode(*args, **kwargs)
 
     def get_repos_as_dict(self, repos_list, repo_groups_list=None,
                           admin=False,
@@ -139,12 +128,16 @@
         admin: return data for action column.
         """
         _render = self._render_datatable
-        from tg import tmpl_context as c
+        from tg import tmpl_context as c, request
+        from kallithea.model.scm import ScmModel
 
         def repo_lnk(name, rtype, rstate, private, fork_of):
             return _render('repo_name', name, rtype, rstate, private, fork_of,
                            short_name=short_name)
 
+        def following(repo_id, is_following):
+            return _render('following', repo_id, is_following)
+
         def last_change(last_change):
             return _render("last_change", last_change)
 
@@ -189,6 +182,10 @@
                 "just_name": repo.just_name,
                 "name": repo_lnk(repo.repo_name, repo.repo_type,
                                  repo.repo_state, repo.private, repo.fork),
+                "following": following(
+                    repo.repo_id,
+                    ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
+                ),
                 "last_change_iso": repo.last_db_change.isoformat(),
                 "last_change": last_change(repo.last_db_change),
                 "last_changeset": last_rev(repo.repo_name, cs_cache),
@@ -273,7 +270,7 @@
                 cur_repo.owner = User.get_by_username(kwargs['owner'])
 
             if 'repo_group' in kwargs:
-                assert kwargs['repo_group'] != u'-1', kwargs # RepoForm should have converted to None
+                assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None
                 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
                 cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
             log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
@@ -290,7 +287,7 @@
                 # clone_uri is modified - if given a value, check it is valid
                 if clone_uri != '':
                     # will raise exception on error
-                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui(clear_session=False))
+                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui())
                 cur_repo.clone_uri = clone_uri
 
             if 'repo_name' in kwargs:
@@ -306,8 +303,7 @@
                     repo=cur_repo, user='default', perm=EMPTY_PERM
                 )
                 # handle extra fields
-            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
-                                kwargs):
+            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
                 k = RepositoryField.un_prefix_key(field)
                 ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
                 if ex_field:
@@ -339,13 +335,13 @@
         fork_of = Repository.guess_instance(fork_of)
         repo_group = RepoGroup.guess_instance(repo_group)
         try:
-            repo_name = safe_unicode(repo_name)
-            description = safe_unicode(description)
+            repo_name = repo_name
+            description = description
             # repo name is just a name of repository
             # while repo_name_full is a full qualified name that is combined
             # with name and path of group
             repo_name_full = repo_name
-            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
+            repo_name = repo_name.split(URL_SEP)[-1]
             if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
                 raise Exception('invalid repo name %s' % repo_name)
 
@@ -360,7 +356,7 @@
             new_repo.private = private
             if clone_uri:
                 # will raise exception on error
-                is_valid_repo_uri(repo_type, clone_uri, make_ui(clear_session=False))
+                is_valid_repo_uri(repo_type, clone_uri, make_ui())
             new_repo.clone_uri = clone_uri
             new_repo.landing_rev = landing_rev
 
@@ -643,8 +639,7 @@
             _paths = [repo_store_location]
         else:
             _paths = [self.repos_path, new_parent_path, repo_name]
-            # we need to make it str for mercurial
-        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
+        repo_path = os.path.join(*_paths)
 
         # check if this path is not a repository
         if is_valid_repo(repo_path, self.repos_path):
@@ -655,13 +650,13 @@
             raise Exception('This path %s is a valid group' % repo_path)
 
         log.info('creating repo %s in %s from url: `%s`',
-            repo_name, safe_unicode(repo_path),
+            repo_name, repo_path,
             obfuscate_url_pw(clone_uri))
 
         backend = get_backend(repo_type)
 
         if repo_type == 'hg':
-            baseui = make_ui(clear_session=False)
+            baseui = make_ui()
             # patch and reset hooks section of UI config to not run any
             # hooks on creating remote repo
             for k, v in baseui.configitems('hooks'):
@@ -676,7 +671,7 @@
             raise Exception('Not supported repo_type %s expected hg/git' % repo_type)
 
         log.debug('Created repo %s with %s backend',
-                  safe_unicode(repo_name), safe_unicode(repo_type))
+                  repo_name, repo_type)
         return repo
 
     def _rename_filesystem_repo(self, old, new):
@@ -688,8 +683,8 @@
         """
         log.info('renaming repo from %s to %s', old, new)
 
-        old_path = safe_str(os.path.join(self.repos_path, old))
-        new_path = safe_str(os.path.join(self.repos_path, new))
+        old_path = os.path.join(self.repos_path, old)
+        new_path = os.path.join(self.repos_path, new)
         if os.path.isdir(new_path):
             raise Exception(
                 'Was trying to rename to already existing dir %s' % new_path
@@ -704,7 +699,7 @@
 
         :param repo: repo object
         """
-        rm_path = safe_str(os.path.join(self.repos_path, repo.repo_name))
+        rm_path = os.path.join(self.repos_path, repo.repo_name)
         log.info("Removing %s", rm_path)
 
         _now = datetime.now()
@@ -715,6 +710,6 @@
             args = repo.group.full_path_splitted + [_d]
             _d = os.path.join(*args)
         if os.path.exists(rm_path):
-            shutil.move(rm_path, safe_str(os.path.join(self.repos_path, _d)))
+            shutil.move(rm_path, os.path.join(self.repos_path, _d))
         else:
             log.error("Can't find repo to delete in %r", rm_path)
--- a/kallithea/model/repo_group.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/repo_group.py	Sat May 02 21:20:43 2020 +0200
@@ -34,6 +34,7 @@
 
 import kallithea.lib.utils2
 from kallithea.lib.utils2 import LazyProperty
+from kallithea.model import db
 from kallithea.model.db import Permission, RepoGroup, Repository, Session, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserRepoGroupToPerm
 
 
@@ -115,7 +116,7 @@
         :param group: instance of group from database
         :param force_delete: use shutil rmtree to remove all objects
         """
-        paths = group.full_path.split(RepoGroup.url_sep())
+        paths = group.full_path.split(db.URL_SEP)
         paths = os.sep.join(paths)
 
         rm_path = os.path.join(self.repos_path, paths)
@@ -288,7 +289,7 @@
                 repo_group.parent_group_id = repo_group_args['parent_group_id']
 
             if 'parent_group_id' in repo_group_args:
-                assert repo_group_args['parent_group_id'] != u'-1', repo_group_args  # RepoGroupForm should have converted to None
+                assert repo_group_args['parent_group_id'] != '-1', repo_group_args  # RepoGroupForm should have converted to None
                 repo_group.parent_group = RepoGroup.get(repo_group_args['parent_group_id'])
             if 'group_name' in repo_group_args:
                 group_name = repo_group_args['group_name']
--- a/kallithea/model/repo_permission.py	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,100 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.model.repo_permission
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-repository permission model for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Oct 1, 2011
-:author: nvinot, marcink
-"""
-
-import logging
-
-from kallithea.model.db import Permission, Repository, Session, User, UserGroupRepoToPerm, UserRepoToPerm
-
-
-log = logging.getLogger(__name__)
-
-
-class RepositoryPermissionModel(object):
-
-    def get_user_permission(self, repository, user):
-        repository = Repository.guess_instance(repository)
-        user = User.guess_instance(user)
-
-        return UserRepoToPerm.query() \
-                .filter(UserRepoToPerm.user == user) \
-                .filter(UserRepoToPerm.repository == repository) \
-                .scalar()
-
-    def update_user_permission(self, repository, user, permission):
-        permission = Permission.get_by_key(permission)
-        current = self.get_user_permission(repository, user)
-        if current:
-            if current.permission is not permission:
-                current.permission = permission
-        else:
-            p = UserRepoToPerm()
-            p.user = user
-            p.repository = repository
-            p.permission = permission
-            Session().add(p)
-
-    def delete_user_permission(self, repository, user):
-        current = self.get_user_permission(repository, user)
-        if current:
-            Session().delete(current)
-
-    def get_users_group_permission(self, repository, users_group):
-        return UserGroupRepoToPerm.query() \
-                .filter(UserGroupRepoToPerm.users_group == users_group) \
-                .filter(UserGroupRepoToPerm.repository == repository) \
-                .scalar()
-
-    def update_users_group_permission(self, repository, users_group,
-                                      permission):
-        permission = Permission.get_by_key(permission)
-        current = self.get_users_group_permission(repository, users_group)
-        if current:
-            if current.permission is not permission:
-                current.permission = permission
-        else:
-            p = UserGroupRepoToPerm()
-            p.users_group = users_group
-            p.repository = repository
-            p.permission = permission
-            Session().add(p)
-
-    def delete_users_group_permission(self, repository, users_group):
-        current = self.get_users_group_permission(repository, users_group)
-        if current:
-            Session().delete(current)
-
-    def update_or_delete_user_permission(self, repository, user, permission):
-        if permission:
-            self.update_user_permission(repository, user, permission)
-        else:
-            self.delete_user_permission(repository, user)
-
-    def update_or_delete_users_group_permission(self, repository, user_group,
-                                              permission):
-        if permission:
-            self.update_users_group_permission(repository, user_group,
-                                               permission)
-        else:
-            self.delete_users_group_permission(repository, user_group)
--- a/kallithea/model/scm.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/scm.py	Sat May 02 21:20:43 2020 +0200
@@ -25,7 +25,6 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import cStringIO
 import logging
 import os
 import posixpath
@@ -42,7 +41,7 @@
 from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError
 from kallithea.lib.hooks import process_pushed_raw_ids
 from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode, set_hook_environment
+from kallithea.lib.utils2 import safe_bytes, set_hook_environment
 from kallithea.lib.vcs import get_backend
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import RepositoryError
@@ -140,9 +139,11 @@
         cls = Repository
         if isinstance(instance, cls):
             return instance
-        elif isinstance(instance, int) or safe_str(instance).isdigit():
+        elif isinstance(instance, int):
             return cls.get(instance)
-        elif isinstance(instance, basestring):
+        elif isinstance(instance, str):
+            if instance.isdigit():
+                return cls.get(int(instance))
             return cls.get_by_repo_name(instance)
         elif instance is not None:
             raise Exception('given object must be int, basestr or Instance'
@@ -161,7 +162,8 @@
     def repo_scan(self, repos_path=None):
         """
         Listing of repositories in given path. This path should not be a
-        repository itself. Return a dictionary of repository objects
+        repository itself. Return a dictionary of repository objects mapping to
+        vcs instances.
 
         :param repos_path: path to directory containing repositories
         """
@@ -187,10 +189,10 @@
 
                     klass = get_backend(path[0])
 
-                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
-                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
+                    if path[0] == 'hg' and path[0] in BACKENDS:
+                        repos[name] = klass(path[1], baseui=baseui)
 
-                    if path[0] == 'git' and path[0] in BACKENDS.keys():
+                    if path[0] == 'git' and path[0] in BACKENDS:
                         repos[name] = klass(path[1])
             except OSError:
                 continue
@@ -394,17 +396,8 @@
         """
         user = User.guess_instance(user)
         IMC = self._get_IMC_module(repo.alias)
-
-        # decoding here will force that we have proper encoded values
-        # in any other case this will throw exceptions and deny commit
-        content = safe_str(content)
-        path = safe_str(f_path)
-        # message and author needs to be unicode
-        # proper backend should then translate that into required type
-        message = safe_unicode(message)
-        author = safe_unicode(author)
         imc = IMC(repo)
-        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
+        imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path)))
         try:
             tip = imc.commit(message=message, author=author,
                              parents=[cs], branch=cs.branch)
@@ -480,22 +473,14 @@
         for f_path in nodes:
             content = nodes[f_path]['content']
             f_path = self._sanitize_path(f_path)
-            f_path = safe_str(f_path)
-            # decoding here will force that we have proper encoded values
-            # in any other case this will throw exceptions and deny commit
-            if isinstance(content, (basestring,)):
-                content = safe_str(content)
-            elif isinstance(content, (file, cStringIO.OutputType,)):
+            if not isinstance(content, str) and not isinstance(content, bytes):
                 content = content.read()
-            else:
-                raise Exception('Content is of unrecognized type %s' % (
-                    type(content)
-                ))
             processed_nodes.append((f_path, content))
 
-        message = safe_unicode(message)
+        message = message
         committer = user.full_contact
-        author = safe_unicode(author) if author else committer
+        if not author:
+            author = committer
 
         IMC = self._get_IMC_module(scm_instance.alias)
         imc = IMC(scm_instance)
@@ -536,9 +521,10 @@
         user = User.guess_instance(user)
         scm_instance = repo.scm_instance_no_cache()
 
-        message = safe_unicode(message)
+        message = message
         committer = user.full_contact
-        author = safe_unicode(author) if author else committer
+        if not author:
+            author = committer
 
         imc_class = self._get_IMC_module(scm_instance.alias)
         imc = imc_class(scm_instance)
@@ -616,9 +602,10 @@
             content = nodes[f_path].get('content')
             processed_nodes.append((f_path, content))
 
-        message = safe_unicode(message)
+        message = message
         committer = user.full_contact
-        author = safe_unicode(author) if author else committer
+        if not author:
+            author = committer
 
         IMC = self._get_IMC_module(scm_instance.alias)
         imc = IMC(scm_instance)
@@ -672,19 +659,19 @@
 
         repo = repo.scm_instance
 
-        branches_group = ([(u'branch:%s' % k, k) for k, v in
-                           repo.branches.iteritems()], _("Branches"))
+        branches_group = ([('branch:%s' % k, k) for k, v in
+                           repo.branches.items()], _("Branches"))
         hist_l.append(branches_group)
         choices.extend([x[0] for x in branches_group[0]])
 
         if repo.alias == 'hg':
-            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
-                                repo.bookmarks.iteritems()], _("Bookmarks"))
+            bookmarks_group = ([('book:%s' % k, k) for k, v in
+                                repo.bookmarks.items()], _("Bookmarks"))
             hist_l.append(bookmarks_group)
             choices.extend([x[0] for x in bookmarks_group[0]])
 
-        tags_group = ([(u'tag:%s' % k, k) for k, v in
-                       repo.tags.iteritems()], _("Tags"))
+        tags_group = ([('tag:%s' % k, k) for k, v in
+                       repo.tags.items()], _("Tags"))
         hist_l.append(tags_group)
         choices.extend([x[0] for x in tags_group[0]])
 
@@ -702,7 +689,7 @@
         # FIXME This may not work on Windows and may need a shell wrapper script.
         return (kallithea.CONFIG.get('git_hook_interpreter')
                 or sys.executable
-                or '/usr/bin/env python2')
+                or '/usr/bin/env python3')
 
     def install_git_hooks(self, repo, force_create=False):
         """
@@ -718,11 +705,11 @@
         if not os.path.isdir(loc):
             os.makedirs(loc)
 
-        tmpl_post = "#!%s\n" % self._get_git_hook_interpreter()
+        tmpl_post = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
         tmpl_post += pkg_resources.resource_string(
             'kallithea', os.path.join('config', 'post_receive_tmpl.py')
         )
-        tmpl_pre = "#!%s\n" % self._get_git_hook_interpreter()
+        tmpl_pre = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
         tmpl_pre += pkg_resources.resource_string(
             'kallithea', os.path.join('config', 'pre_receive_tmpl.py')
         )
@@ -736,12 +723,11 @@
                 log.debug('hook exists, checking if it is from kallithea')
                 with open(_hook_file, 'rb') as f:
                     data = f.read()
-                    matches = re.compile(r'(?:%s)\s*=\s*(.*)'
-                                         % 'KALLITHEA_HOOK_VER').search(data)
+                    matches = re.search(br'^KALLITHEA_HOOK_VER\s*=\s*(.*)$', data, flags=re.MULTILINE)
                     if matches:
                         try:
                             ver = matches.groups()[0]
-                            log.debug('got %s it is kallithea', ver)
+                            log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver)
                             has_hook = True
                         except Exception:
                             log.error(traceback.format_exc())
@@ -753,9 +739,9 @@
                 log.debug('writing %s hook file !', h_type)
                 try:
                     with open(_hook_file, 'wb') as f:
-                        tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
+                        tmpl = tmpl.replace(b'_TMPL_', safe_bytes(kallithea.__version__))
                         f.write(tmpl)
-                    os.chmod(_hook_file, 0755)
+                    os.chmod(_hook_file, 0o755)
                 except IOError as e:
                     log.error('error writing %s: %s', _hook_file, e)
             else:
--- a/kallithea/model/ssh_key.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/ssh_key.py	Sat May 02 21:20:43 2020 +0200
@@ -29,7 +29,7 @@
 from tg.i18n import ugettext as _
 
 from kallithea.lib import ssh
-from kallithea.lib.utils2 import safe_str, str2bool
+from kallithea.lib.utils2 import str2bool
 from kallithea.lib.vcs.exceptions import RepositoryError
 from kallithea.model.db import User, UserSshKeys
 from kallithea.model.meta import Session
@@ -52,9 +52,9 @@
         Will raise SshKeyModelException on errors
         """
         try:
-            keytype, pub, comment = ssh.parse_pub_key(public_key)
+            keytype, _pub, comment = ssh.parse_pub_key(public_key)
         except ssh.SshKeyParseError as e:
-            raise SshKeyModelException(_('SSH key %r is invalid: %s') % (safe_str(public_key), e.message))
+            raise SshKeyModelException(_('SSH key %r is invalid: %s') % (public_key, e.args[0]))
         if not description.strip():
             description = comment.strip()
 
@@ -85,7 +85,7 @@
 
         ssh_key = ssh_key.scalar()
         if ssh_key is None:
-            raise SshKeyModelException(_('SSH key with fingerprint %r found') % safe_str(fingerprint))
+            raise SshKeyModelException(_('SSH key with fingerprint %r found') % fingerprint)
         Session().delete(ssh_key)
 
     def get_ssh_keys(self, user):
@@ -134,8 +134,5 @@
             for key in UserSshKeys.query().join(UserSshKeys.user).filter(User.active == True):
                 f.write(ssh.authorized_keys_line(kallithea_cli_path, config['__file__'], key))
         os.chmod(tmp_authorized_keys, stat.S_IRUSR | stat.S_IWUSR)
-        # This preliminary remove is needed for Windows, not for Unix.
-        # TODO In Python 3, the remove+rename sequence below should become os.replace.
-        if os.path.exists(authorized_keys):
-            os.remove(authorized_keys)
-        os.rename(tmp_authorized_keys, authorized_keys)
+        # Note: simple overwrite / rename isn't enough to replace the file on Windows
+        os.replace(tmp_authorized_keys, authorized_keys)
--- a/kallithea/model/user.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/user.py	Sat May 02 21:20:43 2020 +0200
@@ -36,9 +36,8 @@
 from tg import config
 from tg.i18n import ugettext as _
 
-from kallithea.lib.caching_query import FromCache
 from kallithea.lib.exceptions import DefaultUserException, UserOwnsReposException
-from kallithea.lib.utils2 import generate_api_key, get_current_authuser, safe_unicode
+from kallithea.lib.utils2 import generate_api_key, get_current_authuser
 from kallithea.model.db import Permission, User, UserEmailMap, UserIpMap, UserToPerm
 from kallithea.model.meta import Session
 
@@ -49,11 +48,8 @@
 class UserModel(object):
     password_reset_token_lifetime = 86400 # 24 hours
 
-    def get(self, user_id, cache=False):
+    def get(self, user_id):
         user = User.query()
-        if cache:
-            user = user.options(FromCache("sql_cache_short",
-                                          "get_user_%s" % user_id))
         return user.get(user_id)
 
     def get_user(self, user):
@@ -94,8 +90,8 @@
         log_create_user(new_user.get_dict(), cur_user)
         return new_user
 
-    def create_or_update(self, username, password, email, firstname=u'',
-                         lastname=u'', active=True, admin=False,
+    def create_or_update(self, username, password, email, firstname='',
+                         lastname='', active=True, admin=False,
                          extern_type=None, extern_name=None, cur_user=None):
         """
         Creates a new instance if not found, or updates current one
@@ -142,10 +138,8 @@
             new_user.admin = admin
             new_user.email = email
             new_user.active = active
-            new_user.extern_name = safe_unicode(extern_name) \
-                if extern_name else None
-            new_user.extern_type = safe_unicode(extern_type) \
-                if extern_type else None
+            new_user.extern_name = extern_name
+            new_user.extern_type = extern_type
             new_user.name = firstname
             new_user.lastname = lastname
 
@@ -185,7 +179,7 @@
         # notification to admins
         subject = _('New user registration')
         body = (
-            u'New user registration\n'
+            'New user registration\n'
             '---------------------\n'
             '- Username: {user.username}\n'
             '- Full Name: {user.full_name}\n'
@@ -205,7 +199,7 @@
     def update(self, user_id, form_data, skip_attrs=None):
         from kallithea.lib.auth import get_crypt_password
         skip_attrs = skip_attrs or []
-        user = self.get(user_id, cache=False)
+        user = self.get(user_id)
         if user.is_default_user:
             raise DefaultUserException(
                             _("You can't edit this user since it's "
@@ -310,8 +304,8 @@
         """
         app_secret = config.get('app_instance_uuid')
         return hmac.HMAC(
-            key=u'\0'.join([app_secret, user.password]).encode('utf-8'),
-            msg=u'\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'),
+            key='\0'.join([app_secret, user.password]).encode('utf-8'),
+            msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'),
             digestmod=hashlib.sha1,
         ).hexdigest()
 
--- a/kallithea/model/user_group.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/user_group.py	Sat May 02 21:20:43 2020 +0200
@@ -28,8 +28,8 @@
 import traceback
 
 from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
-from kallithea.model.db import (
-    Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, UserUserGroupToPerm)
+from kallithea.model.db import (Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm,
+                                UserUserGroupToPerm)
 
 
 log = logging.getLogger(__name__)
@@ -94,8 +94,8 @@
     def get_group(self, user_group):
         return UserGroup.guess_instance(user_group)
 
-    def get_by_name(self, name, cache=False, case_insensitive=False):
-        return UserGroup.get_by_group_name(name, cache=cache, case_insensitive=case_insensitive)
+    def get_by_name(self, name, case_insensitive=False):
+        return UserGroup.get_by_group_name(name, case_insensitive=case_insensitive)
 
     def create(self, name, description, owner, active=True, group_data=None):
         try:
@@ -126,7 +126,7 @@
                 if k == 'users_group_members':
                     members_list = []
                     if v:
-                        v = [v] if isinstance(v, basestring) else v
+                        v = [v] if isinstance(v, str) else v
                         for u_id in set(v):
                             member = UserGroupMember(user_group.users_group_id, u_id)
                             members_list.append(member)
@@ -367,7 +367,7 @@
         for gr in set(groups):
             existing_group = UserGroup.get_by_group_name(gr)
             if not existing_group:
-                desc = u'Automatically created from plugin:%s' % extern_type
+                desc = 'Automatically created from plugin:%s' % extern_type
                 # we use first admin account to set the owner of the group
                 existing_group = UserGroupModel().create(gr, desc, owner,
                                         group_data={'extern_type': extern_type})
--- a/kallithea/model/validators.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/model/validators.py	Sat May 02 21:20:43 2020 +0200
@@ -30,9 +30,10 @@
 from kallithea.config.routing import ADMIN_PREFIX
 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel
 from kallithea.lib.compat import OrderedSet
-from kallithea.lib.exceptions import LdapImportError
+from kallithea.lib.exceptions import InvalidCloneUriException, LdapImportError
 from kallithea.lib.utils import is_valid_repo_uri
 from kallithea.lib.utils2 import aslist, repo_name_slug, str2bool
+from kallithea.model import db
 from kallithea.model.db import RepoGroup, Repository, User, UserGroup
 
 
@@ -186,11 +187,7 @@
             slug = repo_name_slug(group_name)
 
             # check for parent of self
-            parent_of_self = lambda: (
-                old_data['group_id'] == parent_group_id
-                if parent_group_id else False
-            )
-            if edit and parent_of_self():
+            if edit and parent_group_id and old_data['group_id'] == parent_group_id:
                 msg = self.message('parent_group_id', state)
                 raise formencode.Invalid(msg, value, state,
                     error_dict=dict(parent_group_id=msg)
@@ -235,7 +232,7 @@
 
         def _validate_python(self, value, state):
             try:
-                (value or '').decode('ascii')
+                (value or '').encode('ascii')
             except UnicodeError:
                 msg = self.message('invalid_password', state)
                 raise formencode.Invalid(msg, value, state,)
@@ -276,7 +273,7 @@
 def ValidAuth():
     class _validator(formencode.validators.FancyValidator):
         messages = {
-            'invalid_auth': _(u'Invalid username or password'),
+            'invalid_auth': _('Invalid username or password'),
         }
 
         def _validate_python(self, value, state):
@@ -329,7 +326,7 @@
                 # value needs to be aware of group name in order to check
                 # db key This is an actual just the name to store in the
                 # database
-                repo_name_full = group_path + RepoGroup.url_sep() + repo_name
+                repo_name_full = group_path + db.URL_SEP + repo_name
             else:
                 group_name = group_path = ''
                 repo_name_full = repo_name
@@ -412,9 +409,9 @@
 
             if url and url != value.get('clone_uri_hidden'):
                 try:
-                    is_valid_repo_uri(repo_type, url, make_ui(clear_session=False))
-                except Exception:
-                    log.exception('URL validation failed')
+                    is_valid_repo_uri(repo_type, url, make_ui())
+                except InvalidCloneUriException as e:
+                    log.warning('validation of clone URL %r failed: %s', url, e)
                     msg = self.message('clone_uri', state)
                     raise formencode.Invalid(msg, value, state,
                         error_dict=dict(clone_uri=msg)
@@ -544,7 +541,7 @@
 
             # CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using
             new_perms_group = defaultdict(dict)
-            for k, v in value.copy().iteritems():
+            for k, v in value.copy().items():
                 if k.startswith('perm_new_member'):
                     del value[k]
                     _type, part = k.split('perm_new_member_')
@@ -556,26 +553,26 @@
                         new_perms_group[pos][_key] = v
 
             # fill new permissions in order of how they were added
-            for k in sorted(map(int, new_perms_group.keys())):
-                perm_dict = new_perms_group[str(k)]
+            for k in sorted(new_perms_group, key=lambda k: int(k)):
+                perm_dict = new_perms_group[k]
                 new_member = perm_dict.get('name')
                 new_perm = perm_dict.get('perm')
                 new_type = perm_dict.get('type')
                 if new_member and new_perm and new_type:
                     perms_new.add((new_member, new_perm, new_type))
 
-            for k, v in value.iteritems():
+            for k, v in value.items():
                 if k.startswith('u_perm_') or k.startswith('g_perm_'):
-                    member = k[7:]
+                    member_name = k[7:]
                     t = {'u': 'user',
                          'g': 'users_group'
                     }[k[0]]
-                    if member == User.DEFAULT_USER:
+                    if member_name == User.DEFAULT_USER_NAME:
                         if str2bool(value.get('repo_private')):
                             # set none for default when updating to
                             # private repo protects against form manipulation
                             v = EMPTY_PERM
-                    perms_update.add((member, v, t))
+                    perms_update.add((member_name, v, t))
 
             value['perms_updates'] = list(perms_update)
             value['perms_new'] = list(perms_new)
@@ -584,16 +581,16 @@
             for k, v, t in perms_new:
                 try:
                     if t == 'user':
-                        self.user_db = User.query() \
+                        _user_db = User.query() \
                             .filter(User.active == True) \
                             .filter(User.username == k).one()
                     if t == 'users_group':
-                        self.user_db = UserGroup.query() \
+                        _user_db = UserGroup.query() \
                             .filter(UserGroup.users_group_active == True) \
                             .filter(UserGroup.users_group_name == k).one()
 
-                except Exception:
-                    log.exception('Updated permission failed')
+                except Exception as e:
+                    log.warning('Error validating %s permission %s', t, k)
                     msg = self.message('perm_new_member_type', state)
                     raise formencode.Invalid(msg, value, state,
                         error_dict=dict(perm_new_member_name=msg)
@@ -782,7 +779,7 @@
 
         def _convert_to_python(self, value, state):
             # filter empty values
-            return filter(lambda s: s not in [None, ''], value)
+            return [s for s in value if s not in [None, '']]
 
         def _validate_python(self, value, state):
             from kallithea.lib import auth_modules
--- a/kallithea/public/fontello/demo.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/public/fontello/demo.html	Sat May 02 21:20:43 2020 +0200
@@ -276,7 +276,7 @@
     }
      </style>
     <link rel="stylesheet" href="css/animation.css"><!--[if IE 7]><link rel="stylesheet" href="css/kallithea-ie7.css"><![endif]-->
-    <script>
+    <script>'use strict';
       function toggleCodes(on) {
         var obj = document.getElementById('icons');
       
--- a/kallithea/public/js/base.js	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/public/js/base.js	Sat May 02 21:20:43 2020 +0200
@@ -171,12 +171,12 @@
             return output.join('');
         }
 
-        var str_format = function() {
+        function str_format() {
             if (!str_format.cache.hasOwnProperty(arguments[0])) {
                 str_format.cache[arguments[0]] = str_format.parse(arguments[0]);
             }
             return str_format.format.call(null, str_format.cache[arguments[0]], arguments);
-        };
+        }
 
         str_format.format = function(parse_tree, argv) {
             var cursor = 1, tree_length = parse_tree.length, node_type = '', arg, output = [], i, k, match, pad, pad_character, pad_length;
@@ -239,7 +239,7 @@
                 else if ((match = /^\x25{2}/.exec(_fmt)) !== null) {
                     parse_tree.push('%');
                 }
-                else if ((match = /^\x25(?:([1-9]\d*)\$|\(([^\)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-fosuxX])/.exec(_fmt)) !== null) {
+                else if ((match = /^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-fosuxX])/.exec(_fmt)) !== null) {
                     if (match[2]) {
                         arg_names |= 1;
                         var field_list = [], replacement_field = match[2], field_match = [];
@@ -281,10 +281,6 @@
         return str_format;
     })();
 
-    var vsprintf = function(fmt, argv) {
-        argv.unshift(fmt);
-        return sprintf.apply(null, argv);
-    };
     return {
         'url': function(route_name, params) {
             var result = route_name;
@@ -335,25 +331,10 @@
 })();
 
 
-/* Invoke all functions in callbacks */
-var _run_callbacks = function(callbacks){
-    if (callbacks !== undefined){
-        var _l = callbacks.length;
-        for (var i=0;i<_l;i++){
-            var func = callbacks[i];
-            if(typeof(func)=='function'){
-                try{
-                    func();
-                }catch (err){};
-            }
-        }
-    }
-}
-
 /**
  * turns objects into GET query string
  */
-var _toQueryString = function(o) {
+function _toQueryString(o) {
     if(typeof o !== 'object') {
         return false;
     }
@@ -362,7 +343,7 @@
         _qs.push(encodeURIComponent(_p) + '=' + encodeURIComponent(o[_p]));
     }
     return _qs.join('&');
-};
+}
 
 /**
  * Load HTML into DOM using Ajax
@@ -386,18 +367,18 @@
                     success();
                 }
             })
-        .fail(function(jqXHR, textStatus, errorThrown) {
+        .fail(function(jqXHR, textStatus) {
                 if (textStatus == "abort")
                     return;
                 $target.html('<span class="bg-danger">ERROR: {0}</span>'.format(textStatus));
                 $target.css('opacity','1.0');
             })
         ;
-};
+}
 
-var ajaxGET = function(url, success, failure) {
+function ajaxGET(url, success, failure) {
     if(failure === undefined) {
-        failure = function(jqXHR, textStatus, errorThrown) {
+        failure = function(jqXHR, textStatus) {
                 if (textStatus != "abort")
                     alert("Ajax GET error: " + textStatus);
             };
@@ -405,21 +386,20 @@
     return $.ajax({url: url, headers: {'X-PARTIAL-XHR': '1'}, cache: false})
         .done(success)
         .fail(failure);
-};
+}
 
-var ajaxPOST = function(url, postData, success, failure) {
+function ajaxPOST(url, postData, success, failure) {
     postData['_session_csrf_secret_token'] = _session_csrf_secret_token;
-    var postData = _toQueryString(postData);
     if(failure === undefined) {
-        failure = function(jqXHR, textStatus, errorThrown) {
+        failure = function(jqXHR, textStatus) {
                 if (textStatus != "abort")
                     alert("Error posting to server: " + textStatus);
             };
     }
-    return $.ajax({url: url, data: postData, type: 'POST', headers: {'X-PARTIAL-XHR': '1'}, cache: false})
+    return $.ajax({url: url, data: _toQueryString(postData), type: 'POST', headers: {'X-PARTIAL-XHR': '1'}, cache: false})
         .done(success)
         .fail(failure);
-};
+}
 
 
 /**
@@ -427,45 +407,47 @@
  * the .show_more must have an id that is the the id of an element to hide prefixed with _
  * the parentnode will be displayed
  */
-var show_more_event = function(){
+function show_more_event(){
     $('.show_more').click(function(e){
         var el = e.currentTarget;
         $('#' + el.id.substring(1)).hide();
         $(el.parentNode).show();
     });
-};
+}
 
 
-var _onSuccessFollow = function(target){
+function _onSuccessFollow(target){
     var $target = $(target);
     var $f_cnt = $('#current_followers_count');
     if ($target.hasClass('follow')) {
         $target.removeClass('follow').addClass('following');
         $target.prop('title', _TM['Stop following this repository']);
         if ($f_cnt.html()) {
-            var cnt = Number($f_cnt.html())+1;
+            const cnt = Number($f_cnt.html())+1;
             $f_cnt.html(cnt);
         }
     } else {
         $target.removeClass('following').addClass('follow');
         $target.prop('title', _TM['Start following this repository']);
         if ($f_cnt.html()) {
-            var cnt = Number($f_cnt.html())-1;
+            const cnt = Number($f_cnt.html())-1;
             $f_cnt.html(cnt);
         }
     }
 }
 
-var toggleFollowingRepo = function(target, follows_repository_id){
-    var args = 'follows_repository_id=' + follows_repository_id;
-    args += '&amp;_session_csrf_secret_token=' + _session_csrf_secret_token;
-    $.post(TOGGLE_FOLLOW_URL, args, function(data){
+function toggleFollowingRepo(target, follows_repository_id){
+    var args = {
+        'follows_repository_id': follows_repository_id,
+        '_session_csrf_secret_token': _session_csrf_secret_token
+    }
+    $.post(TOGGLE_FOLLOW_URL, args, function(){
             _onSuccessFollow(target);
         });
     return false;
-};
+}
 
-var showRepoSize = function(target, repo_name){
+function showRepoSize(target, repo_name){
     var args = '_session_csrf_secret_token=' + _session_csrf_secret_token;
 
     if(!$("#" + target).hasClass('loaded')){
@@ -477,12 +459,12 @@
         });
     }
     return false;
-};
+}
 
 /**
  * load tooltips dynamically based on data attributes, used for .lazy-cs changeset links
  */
-var get_changeset_tooltip = function() {
+function get_changeset_tooltip() {
     var $target = $(this);
     var tooltip = $target.data('tooltip');
     if (!tooltip) {
@@ -499,12 +481,12 @@
         $target.data('tooltip', tooltip);
     }
     return tooltip;
-};
+}
 
 /**
  * activate tooltips and popups
  */
-var tooltip_activate = function(){
+function tooltip_activate(){
     function placement(p, e){
         if(e.getBoundingClientRect().top > 2*$(window).height()/3){
             return 'top';
@@ -529,63 +511,7 @@
             placement: placement
         });
     });
-};
-
-
-/**
- * Quick filter widget
- *
- * @param target: filter input target
- * @param nodes: list of nodes in html we want to filter.
- * @param display_element function that takes current node from nodes and
- *    does hide or show based on the node
- */
-var q_filter = (function() {
-    var _namespace = {};
-    var namespace = function (target) {
-        if (!(target in _namespace)) {
-            _namespace[target] = {};
-        }
-        return _namespace[target];
-    };
-    return function (target, $nodes, display_element) {
-        var $nodes = $nodes;
-        var $q_filter_field = $('#' + target);
-        var F = namespace(target);
-
-        $q_filter_field.keyup(function (e) {
-            clearTimeout(F.filterTimeout);
-            F.filterTimeout = setTimeout(F.updateFilter, 600);
-        });
-
-        F.filterTimeout = null;
-
-        F.updateFilter = function () {
-            // Reset timeout
-            F.filterTimeout = null;
-
-            var obsolete = [];
-
-            var req = $q_filter_field.val().toLowerCase();
-
-            var showing = 0;
-            $nodes.each(function () {
-                var n = this;
-                var target_element = display_element(n);
-                if (req && n.innerHTML.toLowerCase().indexOf(req) == -1) {
-                    $(target_element).hide();
-                }
-                else {
-                    $(target_element).show();
-                    showing += 1;
-                }
-            });
-
-            $('#repo_count').html(showing);
-            /* FIXME: don't hardcode */
-        }
-    }
-})();
+}
 
 
 /**
@@ -661,12 +587,13 @@
     var addlabel = TRANSLATION_MAP['Add Another Comment'];
     var $add = $('<div class="add-button-row"><span class="btn btn-default btn-xs add-button">{0}</span></div>'.format(addlabel));
     $comment_div.append($add);
-    $add.children('.add-button').click(function(e) {
+    $add.children('.add-button').click(function() {
         comment_div_state($comment_div, f_path, line_no, true);
     });
 }
 
 // append a comment form to $comment_div
+// Note: var AJAX_COMMENT_URL must have been defined before invoking this function
 function _comment_div_append_form($comment_div, f_path, line_no) {
     var $form_div = $('#comment-inline-form-template').children()
         .clone()
@@ -725,7 +652,7 @@
             'save_close': pr_close,
             'save_delete': pr_delete
         };
-        var success = function(json_data) {
+        function success(json_data) {
             if (pr_delete) {
                 location = json_data['location'];
             } else {
@@ -738,8 +665,8 @@
                     location.reload(true);
                 }
             }
-        };
-        var failure = function(x, s, e) {
+        }
+        function failure(x, s, e) {
             $preview.removeClass('submitting').addClass('failed');
             var $status = $preview.find('.comment-submission-status');
             $('<span>', {
@@ -764,12 +691,12 @@
                     comment_div_state($comment_div, f_path, line_no);
                 })
             ).appendTo($status);
-        };
+        }
         ajaxPOST(AJAX_COMMENT_URL, postData, success, failure);
     });
 
     // add event handler for hide/cancel buttons
-    $form.find('.hide-inline-form').click(function(e) {
+    $form.find('.hide-inline-form').click(function() {
         comment_div_state($comment_div, f_path, line_no);
     });
 
@@ -783,10 +710,11 @@
 }
 
 
+// Note: var AJAX_COMMENT_URL must have been defined before invoking this function
 function deleteComment(comment_id) {
     var url = AJAX_COMMENT_DELETE_URL.replace('__COMMENT_ID__', comment_id);
     var postData = {};
-    var success = function(o) {
+    function success() {
         $('#comment-'+comment_id).remove();
         // Ignore that this might leave a stray Add button (or have a pending form with another comment) ...
     }
@@ -797,7 +725,7 @@
 /**
  * Double link comments
  */
-var linkInlineComments = function($firstlinks, $comments){
+function linkInlineComments($firstlinks, $comments){
     if ($comments.length > 0) {
         $firstlinks.html('<a href="#{0}">First comment</a>'.format($comments.prop('id')));
     }
@@ -805,7 +733,7 @@
         return;
     }
 
-    $comments.each(function(i, e){
+    $comments.each(function(i){
             var prev = '';
             if (i > 0){
                 var prev_anchor = $($comments.get(i-1)).prop('id');
@@ -823,13 +751,13 @@
 }
 
 /* activate files.html stuff */
-var fileBrowserListeners = function(node_list_url, url_base){
+function fileBrowserListeners(node_list_url, url_base){
     var $node_filter = $('#node_filter');
 
     var filterTimeout = null;
     var nodes = null;
 
-    var initFilter = function(){
+    function initFilter(){
         $('#node_filter_box_loading').show();
         $('#search_activate_id').hide();
         $('#add_node_id').hide();
@@ -850,7 +778,7 @@
         ;
     }
 
-    var updateFilter = function(e) {
+    function updateFilter(e) {
         return function(){
             // Reset timeout
             filterTimeout = null;
@@ -897,7 +825,7 @@
                 $('#tbody_filtered').hide();
             }
         }
-    };
+    }
 
     $('#filter_activate').click(function(){
             initFilter();
@@ -912,10 +840,10 @@
             clearTimeout(filterTimeout);
             filterTimeout = setTimeout(updateFilter(e),600);
         });
-};
+}
 
 
-var initCodeMirror = function(textarea_id, baseUrl, resetUrl){
+function initCodeMirror(textarea_id, baseUrl, resetUrl){
     var myCodeMirror = CodeMirror.fromTextArea($('#' + textarea_id)[0], {
             mode: "null",
             lineNumbers: true,
@@ -924,7 +852,7 @@
         });
     CodeMirror.modeURL = baseUrl + "/codemirror/mode/%N/%N.js";
 
-    $('#reset').click(function(e){
+    $('#reset').click(function(){
             window.location=resetUrl;
         });
 
@@ -941,14 +869,14 @@
         });
 
     return myCodeMirror
-};
+}
 
-var setCodeMirrorMode = function(codeMirrorInstance, mode) {
+function setCodeMirrorMode(codeMirrorInstance, mode) {
     CodeMirror.autoLoadMode(codeMirrorInstance, mode);
 }
 
 
-var _getIdentNode = function(n){
+function _getIdentNode(n){
     //iterate thrugh nodes until matching interesting node
 
     if (typeof n == 'undefined'){
@@ -961,11 +889,11 @@
     else{
         return _getIdentNode(n.parentNode);
     }
-};
+}
 
 /* generate links for multi line selects that can be shown by files.html page_highlights.
  * This is a mouseup handler for hlcode from CodeHtmlFormatter and pygmentize */
-var getSelectionLink = function(e) {
+function getSelectionLink() {
     //get selection from start/to nodes
     if (typeof window.getSelection != "undefined") {
         var s = window.getSelection();
@@ -973,8 +901,8 @@
         var from = _getIdentNode(s.anchorNode);
         var till = _getIdentNode(s.focusNode);
 
-        var f_int = parseInt(from.id.replace('L',''));
-        var t_int = parseInt(till.id.replace('L',''));
+        //var f_int = parseInt(from.id.replace('L',''));
+        //var t_int = parseInt(till.id.replace('L',''));
 
         var yoffset = 35;
         var ranges = [parseInt(from.id.replace('L','')), parseInt(till.id.replace('L',''))];
@@ -1002,53 +930,15 @@
             $hl_div.hide();
         }
     }
-};
+}
 
 /**
  * Autocomplete functionality
  */
 
-// Custom search function for the DataSource of users
-var autocompleteMatchUsers = function (sQuery, myUsers) {
-    // Case insensitive matching
-    var query = sQuery.toLowerCase();
-    var i = 0;
-    var l = myUsers.length;
-    var matches = [];
-
-    // Match against each name of each contact
-    for (; i < l; i++) {
-        var contact = myUsers[i];
-        if (((contact.fname+"").toLowerCase().indexOf(query) > -1) ||
-             ((contact.lname+"").toLowerCase().indexOf(query) > -1) ||
-             ((contact.nname) && ((contact.nname).toLowerCase().indexOf(query) > -1))) {
-            matches[matches.length] = contact;
-        }
-    }
-    return matches;
-};
-
-// Custom search function for the DataSource of userGroups
-var autocompleteMatchGroups = function (sQuery, myGroups) {
-    // Case insensitive matching
-    var query = sQuery.toLowerCase();
-    var i = 0;
-    var l = myGroups.length;
-    var matches = [];
-
-    // Match against each name of each group
-    for (; i < l; i++) {
-        var matched_group = myGroups[i];
-        if (matched_group.grname.toLowerCase().indexOf(query) > -1) {
-            matches[matches.length] = matched_group;
-        }
-    }
-    return matches;
-};
-
 // Highlight the snippet if it is found in the full text, while escaping any existing markup.
 // Snippet must be lowercased already.
-var autocompleteHighlightMatch = function (full, snippet) {
+function autocompleteHighlightMatch(full, snippet) {
     var matchindex = full.toLowerCase().indexOf(snippet);
     if (matchindex <0)
         return full.html_escape();
@@ -1057,10 +947,10 @@
         + full.substr(matchindex, snippet.length).html_escape()
         + '</span>'
         + full.substring(matchindex + snippet.length).html_escape();
-};
+}
 
 // Return html snippet for showing the provided gravatar url
-var gravatar = function(gravatar_lnk, size, cssclass) {
+function gravatar(gravatar_lnk, size, cssclass) {
     if (!gravatar_lnk) {
         return '';
     }
@@ -1072,7 +962,7 @@
             '></i>').format(size, gravatar_lnk, cssclass);
 }
 
-var autocompleteGravatar = function(res, gravatar_lnk, size, group) {
+function autocompleteGravatar(res, gravatar_lnk, size, group) {
     var elem;
     if (group !== undefined) {
         elem = '<i class="perm-gravatar-ac icon-users"></i>';
@@ -1083,7 +973,7 @@
 }
 
 // Custom formatter to highlight the matching letters and do HTML escaping
-var autocompleteFormatter = function (oResultData, sQuery, sResultMatch) {
+function autocompleteFormatter(oResultData, sQuery, sResultMatch) {
     var query;
     if (sQuery && sQuery.toLowerCase) // YAHOO AutoComplete
         query = sQuery.toLowerCase();
@@ -1113,9 +1003,9 @@
     }
 
     return '';
-};
+}
 
-var SimpleUserAutoComplete = function ($inputElement) {
+function SimpleUserAutoComplete($inputElement) {
     $inputElement.select2({
         formatInputTooShort: $inputElement.attr('placeholder'),
         initSelection : function (element, callback) {
@@ -1134,12 +1024,12 @@
         ajax: {
             url: pyroutes.url('users_and_groups_data'),
             dataType: 'json',
-            data: function(term, page){
+            data: function(term){
               return {
                 query: term
               };
             },
-            results: function (data, page){
+            results: function (data){
               return data;
             },
             cache: true
@@ -1150,7 +1040,7 @@
     });
 }
 
-var MembersAutoComplete = function ($inputElement, $typeElement) {
+function MembersAutoComplete($inputElement, $typeElement) {
 
     $inputElement.select2({
         placeholder: $inputElement.attr('placeholder'),
@@ -1158,13 +1048,13 @@
         ajax: {
             url: pyroutes.url('users_and_groups_data'),
             dataType: 'json',
-            data: function(term, page){
+            data: function(term){
               return {
                 query: term,
                 types: 'users,groups'
               };
             },
-            results: function (data, page){
+            results: function (data){
               return data;
             },
             cache: true
@@ -1178,7 +1068,7 @@
     });
 }
 
-var MentionsAutoComplete = function ($inputElement) {
+function MentionsAutoComplete($inputElement) {
   $inputElement.atwho({
     at: "@",
     callbacks: {
@@ -1194,7 +1084,7 @@
           }
         );
       },
-      sorter: function(query, items, searchKey) {
+      sorter: function(query, items) {
         return items;
       }
     },
@@ -1207,28 +1097,10 @@
     },
     insertTpl: "${atwho-at}${nname}"
   });
-};
-
-
-// Set caret at the given position in the input element
-function _setCaretPosition($inputElement, caretPos) {
-    $inputElement.each(function(){
-        if(this.createTextRange) { // IE
-            var range = this.createTextRange();
-            range.move('character', caretPos);
-            range.select();
-        }
-        else if(this.selectionStart) { // other recent browsers
-            this.focus();
-            this.setSelectionRange(caretPos, caretPos);
-        }
-        else // last resort - very old browser
-            this.focus();
-    });
 }
 
 
-var addReviewMember = function(id,fname,lname,nname,gravatar_link,gravatar_size){
+function addReviewMember(id,fname,lname,nname,gravatar_link,gravatar_size){
     var displayname = nname;
     if ((fname != "") && (lname != "")) {
         displayname = "{0} {1} ({2})".format(fname, lname, nname);
@@ -1265,7 +1137,7 @@
     }
 }
 
-var removeReviewMember = function(reviewer_id, repo_name, pull_request_id){
+function removeReviewMember(reviewer_id){
     var $li = $('#reviewer_{0}'.format(reviewer_id));
     $li.find('div div').css("text-decoration", "line-through");
     $li.find('input').prop('name', 'review_members_removed');
@@ -1273,7 +1145,7 @@
 }
 
 /* activate auto completion of users as PR reviewers */
-var PullRequestAutoComplete = function ($inputElement) {
+function PullRequestAutoComplete($inputElement) {
     $inputElement.select2(
     {
         placeholder: $inputElement.attr('placeholder'),
@@ -1281,12 +1153,12 @@
         ajax: {
             url: pyroutes.url('users_and_groups_data'),
             dataType: 'json',
-            data: function(term, page){
+            data: function(term){
               return {
                 query: term
               };
             },
-            results: function (data, page){
+            results: function (data){
               return data;
             },
             cache: true
@@ -1320,12 +1192,12 @@
 }
 
 function ajaxActionRevokePermission(url, obj_id, obj_type, field_id, extra_data) {
-    var success = function (o) {
+    function success() {
             $('#' + field_id).remove();
-        };
-    var failure = function (o) {
+        }
+    function failure(o) {
             alert(_TM['Failed to revoke permission'] + ": " + o.status);
-        };
+        }
     var query_params = {};
     // put extra data into POST
     if (extra_data !== undefined && (typeof extra_data === 'object')){
@@ -1344,11 +1216,11 @@
     }
 
     ajaxPOST(url, query_params, success, failure);
-};
+}
 
 /* Multi selectors */
 
-var MultiSelectWidget = function(selected_id, available_id, form_id){
+function MultiSelectWidget(selected_id, available_id, form_id){
     var $availableselect = $('#' + available_id);
     var $selectedselect = $('#' + selected_id);
 
@@ -1363,10 +1235,10 @@
             return false;
         }).remove();
 
-    $('#add_element').click(function(e){
+    $('#add_element').click(function(){
             $selectedselect.append($availableselect.children('option:selected'));
         });
-    $('#remove_element').click(function(e){
+    $('#remove_element').click(function(){
             $availableselect.append($selectedselect.children('option:selected'));
         });
 
@@ -1382,7 +1254,7 @@
  Branch Sorting callback for select2, modifying the filtered result so prefix
  matches come before matches in the line.
  **/
-var branchSort = function(results, container, query) {
+function branchSort(results, container, query) {
     if (query.term) {
         return results.sort(function (a, b) {
             // Put closed branches after open ones (a bit of a hack ...)
@@ -1416,9 +1288,9 @@
         });
     }
     return results;
-};
+}
 
-var prefixFirstSort = function(results, container, query) {
+function prefixFirstSort(results, container, query) {
     if (query.term) {
         return results.sort(function (a, b) {
             // if parent node, no sorting
@@ -1447,23 +1319,23 @@
         });
     }
     return results;
-};
+}
 
 /* Helper for jQuery DataTables */
 
-var updateRowCountCallback = function updateRowCountCallback($elem, onlyDisplayed) {
+function updateRowCountCallback($elem, onlyDisplayed) {
     return function drawCallback() {
         var info = this.api().page.info(),
             count = onlyDisplayed === true ? info.recordsDisplay : info.recordsTotal;
         $elem.html(count);
     }
-};
+}
 
 
 /**
  * activate changeset parent/child navigation links
  */
-var activate_parent_child_links = function(){
+function activate_parent_child_links(){
 
     $('.parent-child-link').on('click', function(e){
         var $this = $(this);
--- a/kallithea/public/js/codemirror_loadmode.js	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/public/js/codemirror_loadmode.js	Sat May 02 21:20:43 2020 +0200
@@ -1,3 +1,5 @@
+'use strict';
+
 (function() {
   var loading = {};
   function splitCallback(cont, n) {
@@ -8,13 +10,13 @@
     var deps = CodeMirror.modes[mode].dependencies;
     if (!deps) return cont();
     var missing = [];
-    for (var i = 0; i < deps.length; ++i) {
+    for (let i = 0; i < deps.length; ++i) {
       if (!CodeMirror.modes.hasOwnProperty(deps[i]))
         missing.push(deps[i]);
     }
     if (!missing.length) return cont();
     var split = splitCallback(cont, missing.length);
-    for (var i = 0; i < missing.length; ++i)
+    for (let i = 0; i < missing.length; ++i)
       CodeMirror.requireMode(missing[i], split);
   }
 
@@ -60,10 +62,11 @@
 
   CodeMirror.getFilenameAndExt = function(filename){
     var parts = filename.split('.');
+    var ext;
 
     if (parts.length > 1){
-        var ext = parts.pop();
-        var filename = parts.join("");
+        ext = parts.pop();
+        filename = parts.join("");
     }
     return {"filename": filename, "ext": ext};
   };
--- a/kallithea/public/js/graph.js	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/public/js/graph.js	Sat May 02 21:20:43 2020 +0200
@@ -1,3 +1,5 @@
+'use strict';
+
 // branch_renderer.js - Rendering of branch DAGs on the client side
 //
 // Copyright 2010 Marcin Kuzminski <marcin AT python-works DOT com>
@@ -32,7 +34,7 @@
 	if (!document.createElement("canvas").getContext)
 		this.canvas = window.G_vmlCanvasManager.initElement(this.canvas);
 	if (!this.canvas) { // canvas creation did for some reason fail - fail silently
-		this.render = function(data) {};
+		this.render = function() {};
 		return;
 	}
 	this.ctx = this.canvas.getContext('2d');
@@ -80,7 +82,7 @@
 		}
 
 		var lineCount = 1;
-		for (var i=0;i<data.length;i++) {
+		for (let i=0;i<data.length;i++) {
 			var in_l = data[i][1];
 			for (var j in in_l) {
 				var m = in_l[j][0];
@@ -93,7 +95,7 @@
 		var box_size = Math.min(18, (canvasWidth - edge_pad * 2) / lineCount);
 		var base_x = canvasWidth - edge_pad;
 
-		for (var i=0; i < data.length; ++i) {
+		for (let i=0; i < data.length; ++i) {
 			var row = document.getElementById(row_id_prefix+idx);
 			if (row == null) {
 				console.log("error: row "+row_id_prefix+idx+" not found");
@@ -102,15 +104,15 @@
 			var next = document.getElementById(row_id_prefix+(idx+1));
 			var extra = 0;
 
-			cur = data[i];
-			node = cur[0];
-			in_l = cur[1];
-			closing = cur[2];
-			obsolete_node = cur[3];
-			bumped_node = cur[4];
-			divergent_node = cur[5];
-			extinct_node = cur[6];
-			unstable_node = cur[7];
+			const cur = data[i];
+			const node = cur[0];
+			const in_l = cur[1];
+			const closing = cur[2];
+			const obsolete_node = cur[3];
+			//const bumped_node = cur[4];
+			//const divergent_node = cur[5];
+			//const extinct_node = cur[6];
+			const unstable_node = cur[7];
 
 			// center dots on the first element in a td (not necessarily the first one, but there must be one)
 			var firstincell = $(row).find('td>*:visible')[0];
@@ -118,21 +120,21 @@
 			var rowY = Math.floor(row.offsetTop + firstincell.offsetTop + firstincell.offsetHeight/2);
 			var nextY = Math.floor((next == null) ? rowY + row.offsetHeight/2 : next.offsetTop + nextFirstincell.offsetTop + nextFirstincell.offsetHeight/2);
 
-			for (var j in in_l) {
-				line = in_l[j];
-				start = line[0];
-				end = line[1];
-				color = line[2];
-				obsolete_line = line[3];
+			for (let j in in_l) {
+				const line = in_l[j];
+				const start = line[0];
+				const end = line[1];
+				const color = line[2];
+				const obsolete_line = line[3];
 
-				x = Math.floor(base_x - box_size * start);
+				const x = Math.floor(base_x - box_size * start);
 
 				// figure out if this is a dead-end;
 				// we want to fade away this line
 				var dead_end = true;
 				if (next != null) {
-					nextdata = data[i+1];
-					next_l = nextdata[1];
+					const nextdata = data[i+1];
+					const next_l = nextdata[1];
 					for (var k=0; k < next_l.length; ++k) {
 						if (next_l[k][0] == end) {
 							dead_end = false;
@@ -144,7 +146,7 @@
 				}
 
 				if (dead_end) {
-					var gradient = this.ctx.createLinearGradient(x,rowY,x,nextY);
+					let gradient = this.ctx.createLinearGradient(x,rowY,x,nextY);
 					gradient.addColorStop(0,this.calcColor(color, 0.0, 0.65));
 					gradient.addColorStop(1,this.calcColor(color, 1.0, 0.0));
 					this.ctx.strokeStyle = gradient;
@@ -155,7 +157,7 @@
 				// the merged color
 				else if (color != node[1] && start == node[0])
 				{
-					var gradient = this.ctx.createLinearGradient(x,rowY,x,nextY);
+					let gradient = this.ctx.createLinearGradient(x,rowY,x,nextY);
 					gradient.addColorStop(0,this.calcColor(node[1], 0.0, 0.65));
 					gradient.addColorStop(1,this.calcColor(color, 0.0, 0.65));
 					this.ctx.strokeStyle = gradient;
@@ -192,10 +194,10 @@
 				this.ctx.setLineDash([]); // reset the dashed line, if any
 			}
 
-			column = node[0];
-			color = node[1];
+			const column = node[0];
+			const color = node[1];
 
-			x = Math.floor(base_x - box_size * column);
+			const x = Math.floor(base_x - box_size * column);
 
 			this.setColor(color, 0.25, 0.75);
 			if(unstable_node)
@@ -203,7 +205,7 @@
 				this.ctx.fillStyle = 'rgb(255, 0, 0)';
 			}
 
-			r = this.dot_radius
+			let r = this.dot_radius
 			if (obsolete_node)
 			{
 				this.ctx.beginPath();
--- a/kallithea/public/js/mergely.js	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/public/js/mergely.js	Sat May 02 21:20:43 2020 +0200
@@ -699,7 +699,7 @@
 		// resize
 		if (this.settings.autoresize) {
 			var sz_timeout1 = null;
-			var sz = function(init) {
+			function sz(init) {
 				//self.em_height = null; //recalculate
 				if (self.settings.resize) self.settings.resize(init);
 				self.editor[self.id + '-lhs'].refresh();
@@ -854,7 +854,7 @@
 	_clear: function() {
 		var self = this, name, editor, fns, timer, i, change, l;
 
-		var clear_changes = function() {
+		function clear_changes() {
 			timer = new Mgly.Timer();
 			for (i = 0, l = editor.lineCount(); i < l; ++i) {
 				editor.removeLineClass(i, 'background');
--- a/kallithea/templates/admin/admin.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/admin.html	Sat May 02 21:20:43 2020 +0200
@@ -27,7 +27,7 @@
     </div>
 </div>
 
-<script>
+<script>'use strict';
 $(document).ready(function() {
   $('#j_filter').click(function(){
     var $jfilter = $('#j_filter');
@@ -35,9 +35,9 @@
         $jfilter.val('');
     }
   });
-  var fix_j_filter_width = function(len){
+  function fix_j_filter_width(len){
       $('#j_filter').css('width', Math.max(80, len*6.50)+'px');
-  };
+  }
   $('#j_filter').keyup(function () {
     fix_j_filter_width($('#j_filter').val().length);
   });
--- a/kallithea/templates/admin/admin_log.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/admin_log.html	Sat May 02 21:20:43 2020 +0200
@@ -37,7 +37,7 @@
     %endfor
 </table>
 
-<script type="text/javascript">
+<script>'use strict';
   $(document).ready(function(){
     var $user_log = $('#user_log');
     $user_log.on('click','.pager_link',function(e){
--- a/kallithea/templates/admin/auth/auth_settings.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/auth/auth_settings.html	Sat May 02 21:20:43 2020 +0200
@@ -105,10 +105,10 @@
     </div>
 </div>
 
-<script>
+<script>'use strict';
     $('.toggle-plugin').click(function(e){
         var $auth_plugins_input = $('#auth_plugins');
-        var notEmpty = function(element, index, array) {
+        function notEmpty(element) {
             return (element != "");
         }
         var elems = $auth_plugins_input.val().split(',').filter(notEmpty);
--- a/kallithea/templates/admin/gists/edit.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/gists/edit.html	Sat May 02 21:20:43 2020 +0200
@@ -6,9 +6,9 @@
 </%block>
 
 <%block name="js_extra">
-  <script type="text/javascript" src="${h.url('/codemirror/lib/codemirror.js')}"></script>
-  <script type="text/javascript" src="${h.url('/js/codemirror_loadmode.js')}"></script>
-  <script type="text/javascript" src="${h.url('/codemirror/mode/meta.js')}"></script>
+  <script src="${h.url('/codemirror/lib/codemirror.js')}"></script>
+  <script src="${h.url('/js/codemirror_loadmode.js')}"></script>
+  <script src="${h.url('/codemirror/mode/meta.js')}"></script>
 </%block>
 <%block name="css_extra">
   <link rel="stylesheet" type="text/css" href="${h.url('/codemirror/lib/codemirror.css')}"/>
@@ -35,7 +35,7 @@
               ${(h.HTML(_('Gist was updated since you started editing. Copy your changes and click %(here)s to reload new version.'))
                              % {'here': h.link_to(_('here'),h.url('edit_gist', gist_id=c.gist.gist_access_id))})}
             </div>
-            <script>
+            <script>'use strict';
             if (typeof jQuery != 'undefined') {
                 $(".alert").alert();
             }
@@ -67,19 +67,19 @@
             % for cnt, file in enumerate(c.files):
                 <div id="body" class="panel panel-default form-inline">
                     <div class="panel-heading">
-                        <input type="hidden" value="${h.safe_unicode(file.path)}" name="org_files">
-                        <input class="form-control" id="filename_${h.FID('f',file.path)}" name="files" size="30" type="text" value="${h.safe_unicode(file.path)}">
+                        <input type="hidden" value="${file.path}" name="org_files">
+                        <input class="form-control" id="filename_${h.FID('f',file.path)}" name="files" size="30" type="text" value="${file.path}">
                         <select class="form-control" id="mimetype_${h.FID('f',file.path)}" name="mimetypes"></select>
                     </div>
                     <div class="panel-body no-padding">
                         <div id="editor_container">
-                            <textarea id="editor_${h.FID('f',file.path)}" name="contents" style="display:none">${file.content}</textarea>
+                            <textarea id="editor_${h.FID('f',file.path)}" name="contents" style="display:none">${safe_str(file.content)}</textarea>
                         </div>
                     </div>
                 </div>
 
                 ## dynamic edit box.
-                <script type="text/javascript">
+                <script>'use strict';
                     $(document).ready(function(){
                         var myCodeMirror = initCodeMirror(${h.js('editor_' + h.FID('f',file.path))}, ${h.jshtml(request.script_name)}, '');
 
@@ -117,7 +117,7 @@
                         });
 
                         // on type the new filename set mode
-                        $filename_input.keyup(function(e){
+                        $filename_input.keyup(function(){
                             var file_data = CodeMirror.getFilenameAndExt(this.value);
                             if(file_data['ext'] != null){
                                 var detected_mode = CodeMirror.findModeByExtension(file_data['ext']) || CodeMirror.findModeByMIME('text/plain');
@@ -146,7 +146,7 @@
             <a class="btn btn-default" href="${h.url('gist', gist_id=c.gist.gist_access_id)}">${_('Cancel')}</a>
             </div>
           ${h.end_form()}
-          <script>
+          <script>'use strict';
               $('#update').on('click', function(e){
                   e.preventDefault();
 
--- a/kallithea/templates/admin/gists/index.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/gists/index.html	Sat May 02 21:20:43 2020 +0200
@@ -61,7 +61,7 @@
             <div class="text-muted">${gist.gist_description}</div>
           </div>
         % endfor
-        ${c.gists_pager.pager(**request.GET.mixed())}
+        ${c.gists_pager.pager()}
       %else:
         <div>${_('There are no gists yet')}</div>
       %endif
--- a/kallithea/templates/admin/gists/new.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/gists/new.html	Sat May 02 21:20:43 2020 +0200
@@ -6,9 +6,9 @@
 </%block>
 
 <%block name="js_extra">
-  <script type="text/javascript" src="${h.url('/codemirror/lib/codemirror.js')}"></script>
-  <script type="text/javascript" src="${h.url('/js/codemirror_loadmode.js')}"></script>
-  <script type="text/javascript" src="${h.url('/codemirror/mode/meta.js')}"></script>
+  <script src="${h.url('/codemirror/lib/codemirror.js')}"></script>
+  <script src="${h.url('/js/codemirror_loadmode.js')}"></script>
+  <script src="${h.url('/codemirror/mode/meta.js')}"></script>
 </%block>
 <%block name="css_extra">
   <link rel="stylesheet" type="text/css" href="${h.url('/codemirror/lib/codemirror.css')}"/>
@@ -55,7 +55,7 @@
             ${h.reset('reset',_('Reset'),class_="btn btn-default btn-xs")}
             </div>
           ${h.end_form()}
-          <script type="text/javascript">
+          <script>'use strict';
             $(document).ready(function(){
                 var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, '');
 
@@ -93,7 +93,7 @@
                 });
 
                 // on type the new filename set mode
-                $filename_input.keyup(function(e){
+                $filename_input.keyup(function(){
                     var file_data = CodeMirror.getFilenameAndExt(this.value);
                     if(file_data['ext'] != null){
                         var detected_mode = CodeMirror.findModeByExtension(file_data['ext']) || CodeMirror.findModeByMIME('text/plain');
--- a/kallithea/templates/admin/gists/show.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/gists/show.html	Sat May 02 21:20:43 2020 +0200
@@ -76,10 +76,10 @@
               <div class="panel panel-default">
                 <div id="${h.FID('G', file.path)}" class="panel-heading clearfix">
                     <div class="pull-left">
-                      <b>${h.safe_unicode(file.path)}</b>
+                      <b>${file.path}</b>
                     </div>
                     <div class="pull-right">
-                      ${h.link_to(_('Show as raw'),h.url('formatted_gist_file', gist_id=c.gist.gist_access_id, format='raw', revision=file.changeset.raw_id, f_path=h.safe_unicode(file.path)),class_="btn btn-default btn-xs")}
+                      ${h.link_to(_('Show as raw'),h.url('formatted_gist_file', gist_id=c.gist.gist_access_id, format='raw', revision=file.changeset.raw_id, f_path=file.path),class_="btn btn-default btn-xs")}
                     </div>
                 </div>
                 <div class="panel-body no-padding">
--- a/kallithea/templates/admin/my_account/my_account_api_keys.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/my_account/my_account_api_keys.html	Sat May 02 21:20:43 2020 +0200
@@ -90,7 +90,7 @@
 ''')}</p>
 </div>
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $("#lifetime").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/my_account/my_account_repos.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/my_account/my_account_repos.html	Sat May 02 21:20:43 2020 +0200
@@ -4,9 +4,9 @@
     <table class="table" id="datatable_list_wrap" width="100%"></table>
 </div>
 
-<script>
+<script>'use strict';
   var data = ${h.js(c.data)};
-  var myDataTable = $("#datatable_list_wrap").DataTable({
+  $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "raw_name", "visible": false, searchable: false},
--- a/kallithea/templates/admin/my_account/my_account_watched.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/my_account/my_account_watched.html	Sat May 02 21:20:43 2020 +0200
@@ -4,9 +4,9 @@
     <table class="table" id="datatable_list_wrap" width="100%"></table>
 </div>
 
-<script>
+<script>'use strict';
   var data = ${h.js(c.data)};
-  var myDataTable = $("#datatable_list_wrap").DataTable({
+  $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "raw_name", "visible": false, searchable: false},
--- a/kallithea/templates/admin/repo_groups/repo_group_add.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_add.html	Sat May 02 21:20:43 2020 +0200
@@ -61,9 +61,9 @@
     </div>
     ${h.end_form()}
 </div>
-<script>
+<script>'use strict';
     $(document).ready(function(){
-        var setCopyPermsOption = function(group_val){
+        function setCopyPermsOption(group_val){
             if(group_val != "-1"){
                 $('#copy_perms').show();
             }
--- a/kallithea/templates/admin/repo_groups/repo_group_edit_perms.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_edit_perms.html	Sat May 02 21:20:43 2020 +0200
@@ -102,15 +102,15 @@
 </div>
 ${h.end_form()}
 
-<script type="text/javascript">
+<script>'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
-        url = ${h.jshtml(h.url('edit_repo_group_perms_delete', group_name=c.repo_group.group_name))};
+        let url = ${h.jshtml(h.url('edit_repo_group_perms_delete', group_name=c.repo_group.group_name))};
         var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             var recursive = $('input[name=recursive]:checked').val();
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id, {recursive:recursive});
         }
-    };
+    }
 
     $(document).ready(function () {
         if (!$('#perm_new_member_name').hasClass('error')) {
--- a/kallithea/templates/admin/repo_groups/repo_group_edit_settings.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_edit_settings.html	Sat May 02 21:20:43 2020 +0200
@@ -41,7 +41,7 @@
 </div>
 ${h.end_form()}
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $("#parent_group_id").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repo_groups/repo_groups.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_groups.html	Sat May 02 21:20:43 2020 +0200
@@ -30,9 +30,9 @@
         <table class="table" id="datatable_list_wrap" width="100%"></table>
     </div>
 </div>
-<script>
+<script>'use strict';
   var data = ${h.js(c.data)};
-  var myDataTable = $("#datatable_list_wrap").DataTable({
+  $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "raw_name", visible: false, searchable: false},
--- a/kallithea/templates/admin/repos/repo_add_base.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_add_base.html	Sat May 02 21:20:43 2020 +0200
@@ -65,9 +65,9 @@
             </div>
         </div>
 </div>
-<script>
+<script>'use strict';
     $(document).ready(function(){
-        var setCopyPermsOption = function(group_val){
+        function setCopyPermsOption(group_val){
             if(group_val != "-1"){
                 $('#copy_perms').show();
             }
--- a/kallithea/templates/admin/repos/repo_creating.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_creating.html	Sat May 02 21:20:43 2020 +0200
@@ -42,7 +42,7 @@
     </div>
 </div>
 
-<script>
+<script>'use strict';
 (function worker() {
   $.ajax({
     url: ${h.js(h.url('repo_check_home', repo_name=c.repo_name, repo=c.repo, task_id=c.task_id))},
@@ -52,7 +52,7 @@
           window.location = ${h.js(h.url('summary_home', repo_name = c.repo))};
       }
     },
-    complete: function(resp, status) {
+    complete: function(resp) {
       if (resp.status == 200){
           // Schedule the next request when the current one's complete
           setTimeout(worker, 1000);
--- a/kallithea/templates/admin/repos/repo_edit.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_edit.html	Sat May 02 21:20:43 2020 +0200
@@ -33,9 +33,6 @@
           <li class="${'active' if c.active=='fields' else ''}">
               <a href="${h.url('edit_repo_fields', repo_name=c.repo_name)}">${_('Extra Fields')}</a>
           </li>
-          <li class="${'active' if c.active=='caches' else ''}">
-              <a href="${h.url('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a>
-          </li>
           <li class="${'active' if c.active=='remote' else ''}">
               <a href="${h.url('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote')}</a>
           </li>
--- a/kallithea/templates/admin/repos/repo_edit_advanced.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_advanced.html	Sat May 02 21:20:43 2020 +0200
@@ -9,7 +9,7 @@
 </div>
 ${h.end_form()}
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $("#id_fork_of").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repos/repo_edit_caches.html	Thu Apr 09 18:03:56 2020 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-${h.form(url('update_repo_caches', repo_name=c.repo_name))}
-<div class="form">
-   <div>
-       ${h.submit('reset_cache_%s' % c.repo_info.repo_name,_('Invalidate Repository Cache'),class_="btn btn-default btn-sm")}
-      <div class="text-muted">
-        ${_('Manually invalidate cache for this repository. On first access, the repository will be cached again.')}
-      </div>
-      <div>
-        <h5>${_('List of Cached Values')}</h5>
-        <table class="table">
-          <tr>
-            <th>${_('Prefix')}</th>
-            <th>${_('Key')}</th>
-            <th>${_('Active')}</th>
-          </tr>
-          %for cache in c.repo_info.cache_keys:
-              <tr>
-                <td>${cache.get_prefix() or '-'}</td>
-                <td>${cache.cache_key}</td>
-                <td>${h.boolicon(cache.cache_active)}</td>
-              </tr>
-          %endfor
-        </table>
-      </div>
-   </div>
-</div>
-${h.end_form()}
--- a/kallithea/templates/admin/repos/repo_edit_permissions.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_permissions.html	Sat May 02 21:20:43 2020 +0200
@@ -12,7 +12,7 @@
                     <td></td>
                 </tr>
                 ## USERS
-                %for r2p in sorted(c.repo_info.repo_to_perm, key=lambda x: x.user.username != 'default' and x.user.username):
+                %for r2p in sorted(c.repo_info.repo_to_perm, key=lambda x: '' if x.user.username == 'default' else x.user.username):
                     %if r2p.user.username =='default' and c.repo_info.private:
                         <tr>
                             <td colspan="4">
@@ -87,14 +87,14 @@
 </div>
 ${h.end_form()}
 
-<script type="text/javascript">
+<script>'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
-        url = ${h.js(h.url('edit_repo_perms_revoke',repo_name=c.repo_name))};
+        let url = ${h.js(h.url('edit_repo_perms_revoke',repo_name=c.repo_name))};
         var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id);
         }
-    };
+    }
 
     $(document).ready(function () {
         if (!$('#perm_new_member_name').hasClass('error')) {
--- a/kallithea/templates/admin/repos/repo_edit_settings.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_settings.html	Sat May 02 21:20:43 2020 +0200
@@ -103,7 +103,7 @@
     </div>
     ${h.end_form()}
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $('#repo_landing_rev').select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repos/repos.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/repos/repos.html	Sat May 02 21:20:43 2020 +0200
@@ -29,9 +29,9 @@
     </div>
 
 </div>
-<script>
+<script>'use strict';
   var data = ${h.js(c.data)};
-  var myDataTable = $("#datatable_list_wrap").DataTable({
+  $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "raw_name", visible: false, searchable: false},
--- a/kallithea/templates/admin/settings/settings_hooks.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/settings/settings_hooks.html	Sat May 02 21:20:43 2020 +0200
@@ -50,16 +50,16 @@
 ${h.end_form()}
 % endif
 
-<script type="text/javascript">
+<script>'use strict';
 function delete_hook(hook_id, field_id) {
     var sUrl = ${h.js(h.url('admin_settings_hooks_delete'))};
-    var success = function (o) {
+    function success() {
             $('#' + field_id).remove();
-        };
-    var failure = function (o) {
+        }
+    function failure() {
             alert(${h.js(_('Failed to remove hook'))});
-        };
+        }
     var postData = {'hook_id': hook_id};
     ajaxPOST(sUrl, postData, success, failure);
-};
+}
 </script>
--- a/kallithea/templates/admin/settings/settings_vcs.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/settings/settings_vcs.html	Sat May 02 21:20:43 2020 +0200
@@ -69,9 +69,9 @@
     </div>
     ${h.end_form()}
 
-    <script type="text/javascript">
+    <script>'use strict';
         $(document).ready(function(){
-            $('#path_unlock').on('click', function(e){
+            $('#path_unlock').on('click', function(){
                 $('#path_unlock_icon').removeClass('icon-lock');
                 $('#path_unlock_icon').addClass('icon-lock-open-alt');
                 $('#paths_root_path').removeAttr('readonly');
--- a/kallithea/templates/admin/user_groups/user_group_add.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_add.html	Sat May 02 21:20:43 2020 +0200
@@ -52,7 +52,7 @@
     ${h.end_form()}
 </div>
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $('#users_group_name').focus();
     });
--- a/kallithea/templates/admin/user_groups/user_group_edit_perms.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_edit_perms.html	Sat May 02 21:20:43 2020 +0200
@@ -92,14 +92,14 @@
 </div>
 ${h.end_form()}
 
-<script type="text/javascript">
+<script>'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
-        url = ${h.js(h.url('edit_user_group_perms_delete', id=c.user_group.users_group_id))};
+        let url = ${h.js(h.url('edit_user_group_perms_delete', id=c.user_group.users_group_id))};
         var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id);
         }
-    };
+    }
 
     $(document).ready(function () {
         if (!$('#perm_new_member_name').hasClass('error')) {
--- a/kallithea/templates/admin/user_groups/user_group_edit_settings.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_edit_settings.html	Sat May 02 21:20:43 2020 +0200
@@ -48,6 +48,6 @@
                 </div>
     </div>
 ${h.end_form()}
-<script type="text/javascript">
+<script>'use strict';
   MultiSelectWidget('users_group_members','available_members','edit_users_group');
 </script>
--- a/kallithea/templates/admin/user_groups/user_groups.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/user_groups/user_groups.html	Sat May 02 21:20:43 2020 +0200
@@ -29,9 +29,9 @@
         <table class="table" id="datatable_list_wrap" width="100%"></table>
     </div>
 </div>
-<script>
+<script>'use strict';
     var data = ${h.js(c.data)};
-    var $dataTable = $("#datatable_list_wrap").DataTable({
+    $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "raw_name", visible: false, searchable: false},
--- a/kallithea/templates/admin/users/user_add.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/users/user_add.html	Sat May 02 21:20:43 2020 +0200
@@ -84,7 +84,7 @@
     ${h.end_form()}
 </div>
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $('#username').focus();
     });
--- a/kallithea/templates/admin/users/user_edit_api_keys.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/users/user_edit_api_keys.html	Sat May 02 21:20:43 2020 +0200
@@ -77,7 +77,7 @@
     ${h.end_form()}
 </div>
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $("#lifetime").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/users/users.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/admin/users/users.html	Sat May 02 21:20:43 2020 +0200
@@ -28,9 +28,9 @@
     </div>
 </div>
 
-<script>
+<script>'use strict';
     var data = ${h.js(c.data)};
-    var $dataTable = $("#datatable_list_wrap").DataTable({
+    $("#datatable_list_wrap").DataTable({
         data: data.records,
         columns: [
             {data: "gravatar", sortable: false, searchable: false},
--- a/kallithea/templates/base/base.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/base/base.html	Sat May 02 21:20:43 2020 +0200
@@ -168,7 +168,7 @@
                     <span class="show-following"><i class="icon-heart"></i>${_('Unfollow')}</span>
                    </a>
                   </li>
-                  <li><a href="${h.url('repo_fork_home',repo_name=c.repo_name)}"><i class="icon-git-pull-request"></i>${_('Fork')}</a></li>
+                  <li><a href="${h.url('repo_fork_home',repo_name=c.repo_name)}"><i class="icon-fork"></i>${_('Fork')}</a></li>
                   <li><a href="${h.url('pullrequest_home',repo_name=c.repo_name)}"><i class="icon-git-pull-request"></i>${_('Create Pull Request')}</a></li>
               %endif
              </ul>
@@ -177,7 +177,7 @@
     </div>
     </div>
   </nav>
-  <script type="text/javascript">
+  <script>'use strict';
     $(document).ready(function() {
       var bcache = {};
 
@@ -192,7 +192,7 @@
           formatSelection: function(obj) {
               return obj.text.html_escape();
           },
-          formatNoMatches: function(term) {
+          formatNoMatches: function() {
               return ${h.jshtml(_('No matches found'))};
           },
           escapeMarkup: function(m) {
@@ -399,12 +399,12 @@
     </li>
   </ul>
 
-    <script type="text/javascript">
+    <script>'use strict';
         $(document).ready(function(){
             var visual_show_public_icon = ${h.js(c.visual.show_public_icon)};
             var cache = {}
             /*format the look of items in the list*/
-            var format = function(state){
+            function format(state){
                 if (!state.id){
                   return state.text.html_escape(); // optgroup
                 }
@@ -441,7 +441,7 @@
                 sortResults: prefixFirstSort,
                 formatResult: format,
                 formatSelection: format,
-                formatNoMatches: function(term){
+                formatNoMatches: function(){
                     return ${h.jshtml(_('No matches found'))};
                 },
                 containerCssClass: "repo-switcher",
@@ -527,7 +527,7 @@
         </div>
     </div>
 
-    <script type="text/javascript">
+    <script>'use strict';
       $(document).ready(function(){
           activate_parent_child_links();
       });
--- a/kallithea/templates/base/flash_msg.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/base/flash_msg.html	Sat May 02 21:20:43 2020 +0200
@@ -5,11 +5,11 @@
         % for message in messages:
             <div class="alert alert-dismissable ${alert_categories[message.category]}" role="alert">
               <button type="button" class="close" data-dismiss="alert" aria-hidden="true"><i class="icon-cancel-circled"></i></button>
-              ${message}
+              ${message.message|n}
             </div>
         % endfor
     % endif
-    <script>
+    <script>'use strict';
     if (typeof jQuery != 'undefined') {
         $(".alert").alert();
     }
--- a/kallithea/templates/base/perms_summary.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/base/perms_summary.html	Sat May 02 21:20:43 2020 +0200
@@ -5,7 +5,7 @@
 
 <%def name="perms_summary(permissions, show_all=False, actions=True)">
 <div id="perms">
-     %for section in sorted(permissions.keys()):
+     %for section in sorted(permissions):
         <div class="perms_section_head">
             <h4>${section.replace("_"," ").capitalize()}</h4>
             %if section != 'global':
@@ -97,9 +97,9 @@
         %endif
      %endfor
 </div>
-<script>
+<script>'use strict';
     $(document).ready(function(){
-        var show_empty = function(section){
+        function show_empty(section){
             var visible = $('.section_{0} tr.perm_row:visible'.format(section)).length;
             if(visible == 0){
                 $('#empty_{0}'.format(section)).show();
@@ -108,10 +108,10 @@
                 $('#empty_{0}'.format(section)).hide();
             }
         }
-        var update_show = function($checkbox){
+        function update_show($checkbox){
             var section = $checkbox.data('section');
 
-            var elems = $('.filter_' + section).each(function(el){
+            $('.filter_' + section).each(function(){
                 var perm_type = $checkbox.data('perm_type');
                 var checked = $checkbox.prop('checked');
                 if(checked){
--- a/kallithea/templates/base/root.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/base/root.html	Sat May 02 21:20:43 2020 +0200
@@ -21,7 +21,7 @@
         <%block name="css_extra"/>
 
         ## JAVASCRIPT ##
-        <script type="text/javascript">
+        <script>'use strict';
             ## JS translations map
             var TRANSLATION_MAP = {
                 'Cancel': ${h.jshtml(_("Cancel"))},
@@ -58,7 +58,7 @@
             };
             var _TM = TRANSLATION_MAP;
 
-            var TOGGLE_FOLLOW_URL  = ${h.js(h.url('toggle_following'))};
+            var TOGGLE_FOLLOW_URL = ${h.js(h.url('toggle_following'))};
 
             var REPO_NAME = "";
             %if hasattr(c, 'repo_name'):
@@ -67,17 +67,17 @@
 
             var _session_csrf_secret_token = ${h.js(h.session_csrf_secret_token())};
         </script>
-        <script type="text/javascript" src="${h.url('/js/jquery.min.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/jquery.dataTables.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/dataTables.bootstrap.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/bootstrap.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/select2.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/jquery.caret.min.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/jquery.atwho.min.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript" src="${h.url('/js/base.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/jquery.min.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/jquery.dataTables.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/dataTables.bootstrap.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/bootstrap.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/select2.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/jquery.caret.min.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/jquery.atwho.min.js', ver=c.kallithea_version)}"></script>
+        <script src="${h.url('/js/base.js', ver=c.kallithea_version)}"></script>
         ## EXTRA FOR JS
         <%block name="js_extra"/>
-        <script type="text/javascript">
+        <script>'use strict';
             $(document).ready(function(){
               tooltip_activate();
               show_more_event();
--- a/kallithea/templates/changelog/changelog.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changelog/changelog.html	Sat May 02 21:20:43 2020 +0200
@@ -80,8 +80,8 @@
 
                 ${c.cs_pagination.pager()}
 
-        <script type="text/javascript" src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
-        <script type="text/javascript">
+        <script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
+        <script>'use strict';
             var jsdata = ${h.js(c.jsdata)};
             var graph = new BranchRenderer('graph_canvas', 'graph_content', 'chg_');
 
@@ -90,7 +90,7 @@
 
                 pyroutes.register('changeset_home', ${h.js(h.url('changeset_home', repo_name='%(repo_name)s', revision='%(revision)s'))}, ['repo_name', 'revision']);
 
-                var checkbox_checker = function(e) {
+                function checkbox_checker() {
                     var $checked_checkboxes = $checkboxes.filter(':checked');
                     var $singlerange = $('#singlerange');
 
@@ -163,7 +163,7 @@
                         $('#compare_fork').show();
                         $checkboxes.closest('tr').removeClass('out-of-range');
                     }
-                };
+                }
                 checkbox_checker();
                 $checkboxes.click(function() {
                     checkbox_checker();
@@ -171,7 +171,7 @@
                 });
                 $('#singlerange').click(checkbox_checker);
 
-                $('#rev_range_clear').click(function(e){
+                $('#rev_range_clear').click(function(){
                     $checkboxes.prop('checked', false);
                     checkbox_checker();
                     graph.render(jsdata);
--- a/kallithea/templates/changelog/changelog_table.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changelog/changelog_table.html	Sat May 02 21:20:43 2020 +0200
@@ -110,9 +110,9 @@
     </tbody>
     </table>
 
-<script type="text/javascript">
+<script>'use strict';
   $(document).ready(function() {
-    $('#changesets .expand_commit').on('click',function(e){
+    $('#changesets .expand_commit').on('click',function(){
       $(this).next('.mid').find('.message > div').toggleClass('hidden');
       ${resize_js};
     });
--- a/kallithea/templates/changeset/changeset.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changeset/changeset.html	Sat May 02 21:20:43 2020 +0200
@@ -22,9 +22,9 @@
   <div class="panel-heading clearfix">
     ${self.breadcrumbs()}
   </div>
-  <script>
-    AJAX_COMMENT_URL = ${h.js(url('changeset_comment',repo_name=c.repo_name,revision=c.changeset.raw_id))};
-    AJAX_COMMENT_DELETE_URL = ${h.js(url('changeset_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
+  <script>'use strict';
+    var AJAX_COMMENT_URL = ${h.js(url('changeset_comment',repo_name=c.repo_name,revision=c.changeset.raw_id))};
+    var AJAX_COMMENT_DELETE_URL = ${h.js(url('changeset_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
   </script>
   <div class="panel-body">
     <div class="panel panel-default">
@@ -90,16 +90,14 @@
                          <span><b>${h.person(c.changeset.author,'full_name_and_username')}</b> - ${h.age(c.changeset.date,True)} ${h.fmt_date(c.changeset.date)}</span><br/>
                          <span>${h.email_or_none(c.changeset.author)}</span><br/>
                      </div>
-                     <% rev = c.changeset.extra.get('source') %>
-                     %if rev:
+                     %if c.changeset_graft_source_hash:
                      <div>
-                       ${_('Grafted from:')} ${h.link_to(h.short_id(rev),h.url('changeset_home',repo_name=c.repo_name,revision=rev), class_="changeset_hash")}
+                       ${_('Grafted from:')} ${h.link_to(h.short_id(c.changeset_graft_source_hash),h.url('changeset_home',repo_name=c.repo_name,revision=c.changeset_graft_source_hash), class_="changeset_hash")}
                      </div>
                      %endif
-                     <% rev = c.changeset.extra.get('transplant_source', '').encode('hex') %>
-                     %if rev:
+                     %if c.changeset_transplant_source_hash:
                      <div>
-                       ${_('Transplanted from:')} ${h.link_to(h.short_id(rev),h.url('changeset_home',repo_name=c.repo_name,revision=rev), class_="changeset_hash")}
+                       ${_('Transplanted from:')} ${h.link_to(h.short_id(c.changeset_transplant_source_hash),h.url('changeset_home',repo_name=c.repo_name,revision=c.changeset_transplant_source_hash), class_="changeset_hash")}
                      </div>
                      %endif
 
@@ -145,7 +143,7 @@
                 %for fid, url_fid, op, a_path, path, diff, stats in file_diff_data:
                     <div class="cs_${op} clearfix">
                       <span class="node">
-                          <i class="icon-diff-${op}"></i>${h.link_to(h.safe_unicode(path), '#%s' % fid)}
+                          <i class="icon-diff-${op}"></i>${h.link_to(path, '#%s' % fid)}
                       </span>
                       <div class="changes">${h.fancy_file_stats(stats)}</div>
                     </div>
@@ -186,9 +184,9 @@
     </div>
 
     ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS
-    <script type="text/javascript">
+    <script>'use strict';
       $(document).ready(function(){
-          $('.code-difftable').on('click', '.add-bubble', function(e){
+          $('.code-difftable').on('click', '.add-bubble', function(){
               show_comment_form($(this));
           });
 
--- a/kallithea/templates/changeset/changeset_file_comment.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changeset/changeset_file_comment.html	Sat May 02 21:20:43 2020 +0200
@@ -163,8 +163,8 @@
 ## original location of comments ... but the ones outside diff context remains here
 <div class="comments inline-comments">
   %for f_path, lines in c.inline_comments:
-    %for line_no, comments in lines.iteritems():
-      <div class="comments-list-chunk" data-f_path="${f_path}" data-line_no="${line_no}" data-target-id="${h.safeid(h.safe_unicode(f_path))}_${line_no}">
+    %for line_no, comments in lines.items():
+      <div class="comments-list-chunk" data-f_path="${f_path}" data-line_no="${line_no}" data-target-id="${h.safeid(f_path)}_${line_no}">
         %for co in comments:
             ${comment_block(co)}
         %endfor
@@ -192,7 +192,7 @@
   </div>
 </div>
 
-<script>
+<script>'use strict';
 
 $(document).ready(function () {
 
--- a/kallithea/templates/changeset/changeset_range.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changeset/changeset_range.html	Sat May 02 21:20:43 2020 +0200
@@ -56,7 +56,7 @@
                             <div class="cs_${op} clearfix">
                                 <span class="node">
                                     <i class="icon-diff-${op}"></i>
-                                    ${h.link_to(h.safe_unicode(path), '#%s' % fid)}
+                                    ${h.link_to(path, '#%s' % fid)}
                                 </span>
                                 <div class="changes">${h.fancy_file_stats(stats)}</div>
                             </div>
--- a/kallithea/templates/changeset/diff_block.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/changeset/diff_block.html	Sat May 02 21:20:43 2020 +0200
@@ -22,7 +22,7 @@
     <div id="${id_fid}" class="panel panel-default ${cls}">
         <div class="panel-heading clearfix">
                 <div class="pull-left">
-                    ${h.safe_unicode(cs_filename)}
+                    ${cs_filename}
                 </div>
                 <div class="pull-left diff-actions">
                   <span>
@@ -57,13 +57,13 @@
                     %endif
                   </span>
 
-                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=h.safe_unicode(cs_filename),diff2=cs_rev,diff1=a_rev,diff='diff',fulldiff=1)}" data-toggle="tooltip" title="${_('Show full diff for this file')}">
+                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='diff',fulldiff=1)}" data-toggle="tooltip" title="${_('Show full diff for this file')}">
                       <i class="icon-file-code"></i></a>
-                  <a href="${h.url('files_diff_2way_home',repo_name=cs_repo_name,f_path=h.safe_unicode(cs_filename),diff2=cs_rev,diff1=a_rev,diff='diff',fulldiff=1)}" data-toggle="tooltip" title="${_('Show full side-by-side diff for this file')}">
+                  <a href="${h.url('files_diff_2way_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='diff',fulldiff=1)}" data-toggle="tooltip" title="${_('Show full side-by-side diff for this file')}">
                       <i class="icon-docs"></i></a>
-                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=h.safe_unicode(cs_filename),diff2=cs_rev,diff1=a_rev,diff='raw')}" data-toggle="tooltip" title="${_('Raw diff')}">
+                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='raw')}" data-toggle="tooltip" title="${_('Raw diff')}">
                       <i class="icon-diff"></i></a>
-                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=h.safe_unicode(cs_filename),diff2=cs_rev,diff1=a_rev,diff='download')}" data-toggle="tooltip" title="${_('Download diff')}">
+                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='download')}" data-toggle="tooltip" title="${_('Download diff')}">
                       <i class="icon-floppy"></i></a>
                   ${c.ignorews_url(request.GET, url_fid)}
                   ${c.context_url(request.GET, url_fid)}
@@ -73,7 +73,7 @@
                     ${h.checkbox('checkbox-show-inline-' + id_fid, checked="checked",class_="show-inline-comments",**{'data-id_for':id_fid})}
                 </div>
         </div>
-        <div class="no-padding panel-body" data-f_path="${h.safe_unicode(cs_filename)}">
+        <div class="no-padding panel-body" data-f_path="${cs_filename}">
             ${diff|n}
             %if op and cs_filename.rsplit('.')[-1] in ['png', 'gif', 'jpg', 'bmp']:
               <div class="btn btn-image-diff-show">Show images</div>
@@ -96,9 +96,9 @@
 </%def>
 
 <%def name="diff_block_js()">
-<script type="text/javascript">
+<script>'use strict';
 $(document).ready(function(){
-    $('.btn-image-diff-show').click(function(e){
+    $('.btn-image-diff-show').click(function(){
         $('.btn-image-diff-show').hide();
         $('.btn-image-diff-swap').show();
         $('.img-diff-swapable')
@@ -112,10 +112,10 @@
         $('#'+e.currentTarget.id+'-img-a.img-diff-swapable')
           .before($('#'+e.currentTarget.id+'-img-b.img-diff-swapable'));
     });
-    var reset = function(e){
+    function reset(e){
         $('#'+e.currentTarget.id+'-img-a.img-diff-swapable')
           .after($('#'+e.currentTarget.id+'-img-b.img-diff-swapable'));
-    };
+    }
     $('.btn-image-diff-swap').mouseup(reset);
     $('.btn-image-diff-swap').mouseleave(reset);
 
--- a/kallithea/templates/compare/compare_cs.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/compare/compare_cs.html	Sat May 02 21:20:43 2020 +0200
@@ -63,17 +63,17 @@
 %if c.is_ajax_preview:
 <div id="jsdata" style="display:none">${h.js(c.jsdata)}</div>
 %else:
-<script type="text/javascript" src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
+<script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
 %endif
 
-<script type="text/javascript">
+<script>'use strict';
     var jsdata = ${h.js(c.jsdata)};
     var graph = new BranchRenderer('graph_canvas', 'graph_content_pr', 'chg_');
 
     $(document).ready(function(){
         graph.render(jsdata);
 
-        $('.expand_commit').click(function(e){
+        $('.expand_commit').click(function(){
             $(this).next('.mid').find('.message').toggleClass('expanded');
             graph.render(jsdata);
         });
--- a/kallithea/templates/compare/compare_diff.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/compare/compare_diff.html	Sat May 02 21:20:43 2020 +0200
@@ -72,7 +72,7 @@
                     <div class="cs_${op} clearfix">
                       <span class="node">
                           <i class="icon-diff-${op}"></i>
-                          ${h.link_to(h.safe_unicode(path), '#%s' % fid)}
+                          ${h.link_to(path, '#%s' % fid)}
                       </span>
                       <div class="changes">${h.fancy_file_stats(stats)}</div>
                     </div>
@@ -98,7 +98,7 @@
     </div>
 
 </div>
-    <script type="text/javascript">
+    <script>'use strict';
 
    $(document).ready(function(){
     var cache = {};
@@ -154,7 +154,7 @@
     make_revision_dropdown("#compare_org",   ${h.js(c.a_repo.repo_name)},  ${h.js(c.a_ref_name)},  'cache');
     make_revision_dropdown("#compare_other", ${h.js(c.cs_repo.repo_name)}, ${h.js(c.cs_ref_name)}, 'cache2');
 
-    var values_changed = function() {
+    function values_changed() {
         var values = $('#compare_org').select2('data') && $('#compare_other').select2('data');
         if (values) {
              $('#compare_revs').removeClass("disabled");
@@ -167,7 +167,7 @@
     values_changed();
     $('#compare_org').change(values_changed);
     $('#compare_other').change(values_changed);
-    $('#compare_revs').on('click', function(e){
+    $('#compare_revs').on('click', function(){
         var org = $('#compare_org').select2('data');
         var other = $('#compare_other').select2('data');
         if (!org || !other) {
--- a/kallithea/templates/data_table/_dt_elements.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/data_table/_dt_elements.html	Sat May 02 21:20:43 2020 +0200
@@ -31,6 +31,12 @@
   </div>
 </%def>
 
+<%def name="following(repo_id, repo_following)">
+  %if request.authuser.username != 'default':
+    <a href="#" class="${'following' if repo_following else 'follow'}" onclick="return toggleFollowingRepo(this, ${repo_id});"><i class="list-extra icon-heart-empty show-follow" title="${_('Follow')}"></i><i class="list-extra icon-heart show-following" title="${_('Unfollow')}"></i></a>
+  %endif
+</%def>
+
 <%def name="last_change(last_change)">
   <span data-toggle="tooltip" title="${h.fmt_date(last_change)}" date="${last_change}">${h.age(last_change)}</span>
 </%def>
--- a/kallithea/templates/email_templates/changeset_comment.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/changeset_comment.html	Sat May 02 21:20:43 2020 +0200
@@ -33,7 +33,8 @@
     </tr>
     <tr>
         <td>
-<%include file="button.html" args="url=cs_comment_url,title=_('View Comment'),padding_bottom=False"/>\
+<% title = _('View Comment') %>\
+<%include file="button.html" args="url=cs_comment_url,title=title,padding_bottom=False"/>\
         </td>
     </tr>
 </table>
--- a/kallithea/templates/email_templates/changeset_comment.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/changeset_comment.txt	Sat May 02 21:20:43 2020 +0200
@@ -13,4 +13,5 @@
 ${_('by')|n,unicode} \
 ${cs_author.full_name_and_username|n,unicode}.
 
-<%include file="button.txt" args="url=cs_comment_url,title=_('View Comment')"/>\
+<% title = _('View Comment') %>\
+<%include file="button.txt" args="url=cs_comment_url,title=title"/>\
--- a/kallithea/templates/email_templates/default.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/default.html	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,8 @@
 <%inherit file="main.html"/>\
 \
 <%block name="header">\
-<%include file="header.html" args="title=_('Message'),link=None"/>\
+<% title = _('Message') %>\
+<%include file="header.html" args="title=title,link=None"/>\
 </%block>\
 \
 <table cellpadding="0" cellspacing="0" border="0" width="100%">
--- a/kallithea/templates/email_templates/password_reset.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/password_reset.html	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,8 @@
 <%inherit file="main.html"/>\
 \
 <%block name="header">\
-<%include file="header.html" args="title=_('Password Reset Request'),link=None"/>\
+<% title = _('Password Reset Request') %>\
+<%include file="header.html" args="title=title,link=None"/>\
 </%block>\
 \
 <table cellpadding="0" cellspacing="0" border="0" width="100%" style="table-layout:fixed;word-wrap:break-word;">
--- a/kallithea/templates/email_templates/password_reset.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/password_reset.txt	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,6 @@
 <%block name="header">\
-<%include file="header.txt" args="title=_('Password Reset Request'),link=None"/>\
+<% title = _('Password Reset Request') %>\
+<%include file="header.txt" args="title=title,link=None"/>\
 </%block>\
 \
 ${_('Hello %s') % user|n,unicode},
--- a/kallithea/templates/email_templates/pull_request.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/pull_request.html	Sat May 02 21:20:43 2020 +0200
@@ -82,7 +82,8 @@
     </tr>
     <tr>
         <td>
-<%include file="button.html" args="url=pr_url,title=_('View Pull Request'),padding_bottom=False"/>\
+<% title = _('View Pull Request') %>\
+<%include file="button.html" args="url=pr_url,title=title,padding_bottom=False"/>\
         </td>
     </tr>
 </table>
--- a/kallithea/templates/email_templates/pull_request.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/pull_request.txt	Sat May 02 21:20:43 2020 +0200
@@ -29,4 +29,5 @@
 ${h.shorter(desc, 80, firstline=True)|n,unicode}
 %endfor
 
-<%include file="button.txt" args="url=pr_url,title='View Pull Request'"/>\
+<% title = _('View Pull Request') %>\
+<%include file="button.txt" args="url=pr_url,title=title"/>\
--- a/kallithea/templates/email_templates/pull_request_comment.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/pull_request_comment.html	Sat May 02 21:20:43 2020 +0200
@@ -40,7 +40,8 @@
     </tr>
     <tr>
         <td>
-<%include file="button.html" args="url=pr_comment_url,title=_('View Comment'),padding_bottom=False"/>\
+<% title = _('View Comment') %>\
+<%include file="button.html" args="url=pr_comment_url,title=title,padding_bottom=False"/>\
         </td>
     </tr>
 </table>
--- a/kallithea/templates/email_templates/pull_request_comment.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/pull_request_comment.txt	Sat May 02 21:20:43 2020 +0200
@@ -19,4 +19,5 @@
 ${_('branch')|n,unicode} \
 ${pr_target_branch|n,unicode}
 
-<%include file="button.txt" args="url=pr_comment_url,title=_('View Comment')"/>\
+<% title = _('View Comment') %>\
+<%include file="button.txt" args="url=pr_comment_url,title=title"/>\
--- a/kallithea/templates/email_templates/registration.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/registration.html	Sat May 02 21:20:43 2020 +0200
@@ -2,7 +2,8 @@
 <%inherit file="main.html"/>\
 \
 <%block name="header">\
-<%include file="header.html" args="title=_('New User Registration'),link=registered_user_url"/>\
+<% title = _('New User Registration') %>\
+<%include file="header.html" args="title=title,link=registered_user_url"/>\
 </%block>\
 \
 <table cellpadding="0" cellspacing="0" border="0" width="100%">
@@ -38,7 +39,8 @@
     </tr>
     <tr>
         <td colspan="2">
-<%include file="button.html" args="url=registered_user_url,title=_('View User Profile'),padding_bottom=False"/>\
+<% title = _('View User Profile') %>\
+<%include file="button.html" args="url=registered_user_url,title=title,padding_bottom=False"/>\
         </td>
     </tr>
 </table>
--- a/kallithea/templates/email_templates/registration.txt	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/email_templates/registration.txt	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,6 @@
 <%block name="header">\
-<%include file="header.txt" args="title=_('New User Registration'),link=registered_user_url"/>\
+<% title = _('New User Registration') %>\
+<%include file="header.txt" args="title=title,link=registered_user_url"/>\
 </%block>\
 
 ${_('Username')|n,unicode}: ${new_username|n,unicode}
@@ -8,4 +9,5 @@
 
 ${_('Email')|n,unicode}: ${new_email|n,unicode}
 
-<%include file="button.txt" args="url=registered_user_url,title='View User Profile'"/>\
+<% title = _('View User Profile') %>\
+<%include file="button.txt" args="url=registered_user_url,title=title"/>\
--- a/kallithea/templates/files/diff_2way.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/files/diff_2way.html	Sat May 02 21:20:43 2020 +0200
@@ -3,8 +3,8 @@
 <%inherit file="/base/base.html"/>
 
 <%block name="js_extra">
-  <script type="text/javascript" src="${h.url('/codemirror/lib/codemirror.js')}"></script>
-  <script type="text/javascript" src="${h.url('/js/mergely.js')}"></script>
+  <script src="${h.url('/codemirror/lib/codemirror.js')}"></script>
+  <script src="${h.url('/js/mergely.js')}"></script>
 </%block>
 <%block name="css_extra">
   <link rel="stylesheet" type="text/css" href="${h.url('/codemirror/lib/codemirror.css')}"/>
@@ -34,22 +34,22 @@
         <div class="panel panel-default">
             <div class="panel-heading clearfix">
                     <div class="pull-left">
-                        ${h.link_to(h.safe_unicode(c.node1.path),h.url('files_home',repo_name=c.repo_name,
-                        revision=c.cs2.raw_id,f_path=h.safe_unicode(c.node1.path)))}
+                        ${h.link_to(c.node1.path,h.url('files_home',repo_name=c.repo_name,
+                        revision=c.cs2.raw_id,f_path=c.node1.path))}
                     </div>
                     <div class="pull-left diff-actions">
-                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node1.path),diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='diff',fulldiff=1)}"
+                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='diff',fulldiff=1)}"
                          data-toggle="tooltip"
                          title="${_('Show full diff for this file')}">
                           <i class="icon-file-code"></i></a>
-                      <a href="${h.url('files_diff_2way_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node1.path),diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='diff',fulldiff=1)}"
+                      <a href="${h.url('files_diff_2way_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='diff',fulldiff=1)}"
                          data-toggle="tooltip"
                          title="${_('Show full side-by-side diff for this file')}">
                           <i class="icon-docs"></i></a>
-                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node1.path),diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='raw')}"
+                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='raw')}"
                          data-toggle="tooltip"
                          title="${_('Raw diff')}"><i class="icon-diff"></i></a>
-                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node1.path),diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='download')}"
+                      <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='download')}"
                          data-toggle="tooltip"
                          title="${_('Download diff')}"><i class="icon-floppy"></i></a>
                       ${h.checkbox('ignorews', label=_('Ignore whitespace'))}
@@ -60,9 +60,9 @@
         </div>
     </div>
 
-<script>
-var orig1_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node1.path),revision=c.cs1.raw_id))};
-var orig2_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=h.safe_unicode(c.node2.path),revision=c.cs2.raw_id))};
+<script>'use strict';
+var orig1_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=c.node1.path,revision=c.cs1.raw_id))};
+var orig2_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=c.node2.path,revision=c.cs2.raw_id))};
 
 $(document).ready(function () {
     $('#compare').mergely({
--- a/kallithea/templates/files/files.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/files/files.html	Sat May 02 21:20:43 2020 +0200
@@ -3,7 +3,7 @@
 <%block name="title">
     ${_('%s Files') % c.repo_name}
     %if hasattr(c,'file'):
-        &middot; ${h.safe_unicode(c.file.path) or '/'}
+        &middot; ${c.file.path or '/'}
     %endif
 </%block>
 
@@ -36,7 +36,7 @@
     </div>
 </div>
 
-<script type="text/javascript">
+<script>'use strict';
 var CACHE = {};
 var CACHE_EXPIRE = 5*60*1000; //cache for 5*60s
 //used to construct links from the search list
@@ -50,7 +50,7 @@
 pyroutes.register('files_history_home', ${h.js(h.url('files_history_home', repo_name=c.repo_name,revision='%(revision)s',f_path='%(f_path)s'))}, ['revision', 'f_path']);
 pyroutes.register('files_authors_home', ${h.js(h.url('files_authors_home', repo_name=c.repo_name,revision='%(revision)s',f_path='%(f_path)s'))}, ['revision', 'f_path']);
 
-var ypjax_links = function(){
+function ypjax_links(){
     $('.ypjax-link').click(function(e){
 
         //don't do ypjax on middle click
@@ -88,7 +88,7 @@
     });
 }
 
-var load_state = function(state) {
+function load_state(state) {
     var $files_data = $('#files_data');
     var cache_key = state.url;
     var _cache_obj = CACHE[cache_key];
@@ -106,7 +106,7 @@
     }
 }
 
-var post_load_state = function(state) {
+function post_load_state(state) {
     ypjax_links();
     tooltip_activate();
 
@@ -125,16 +125,16 @@
     }
 
     function highlight_lines(lines){
-        for(pos in lines){
+        for(let pos in lines){
           $('#L'+lines[pos]).css('background-color','#FFFFBE');
         }
     }
-    page_highlights = location.href.substring(location.href.indexOf('#')+1).split('L');
+    let page_highlights = location.href.substring(location.href.indexOf('#')+1).split('L');
     if (page_highlights.length == 2){
-       highlight_ranges  = page_highlights[1].split(",");
+       let highlight_ranges  = page_highlights[1].split(",");
 
        var h_lines = [];
-       for (pos in highlight_ranges){
+       for (let pos in highlight_ranges){
             var _range = highlight_ranges[pos].split('-');
             if(_range.length == 2){
                 var start = parseInt(_range[0]);
@@ -217,12 +217,12 @@
     });
 
     // init the search filter
-    var _node_list_url = node_list_url.replace('__REV__', ${h.js(c.changeset.raw_id)}).replace('__FPATH__', ${h.js(h.safe_unicode(c.file.path))});
+    var _node_list_url = node_list_url.replace('__REV__', ${h.js(c.changeset.raw_id)}).replace('__FPATH__', ${h.js(c.file.path)});
     var _url_base = url_base.replace('__REV__', ${h.js(c.changeset.raw_id)});
     fileBrowserListeners(_node_list_url, _url_base);
 
     var initial_state = {url:window.location.href, title:document.title, url_base:_url_base,
-         node_list_url:_node_list_url, rev:${h.js(c.changeset.raw_id)}, f_path:${h.js(h.safe_unicode(c.file.path))}};
+         node_list_url:_node_list_url, rev:${h.js(c.changeset.raw_id)}, f_path:${h.js(c.file.path)}};
 
     // change branch filter
     $("#branch_selector").select2({
@@ -234,7 +234,7 @@
     $("#branch_selector").change(function(e){
         var selected = e.currentTarget.options[e.currentTarget.selectedIndex].value;
         if(selected && selected != ${h.js(c.changeset.raw_id)}){
-            window.location = pyroutes.url('files_home', {'repo_name': ${h.js(h.safe_unicode(c.repo_name))}, 'revision': selected, 'f_path': ${h.js(h.safe_unicode(c.file.path))}});
+            window.location = pyroutes.url('files_home', {'repo_name': ${h.js(c.repo_name)}, 'revision': selected, 'f_path': ${h.js(c.file.path)}});
             $("#body").hide();
         } else {
             $("#branch_selector").val(${h.js(c.changeset.raw_id)});
@@ -242,7 +242,7 @@
     });
     $('#show_authors').on('click', function(){
         $.ajax({
-            url: pyroutes.url('files_authors_home', {'revision': ${h.js(c.changeset.raw_id)}, 'f_path': ${h.js(h.safe_unicode(c.file.path))}}),
+            url: pyroutes.url('files_authors_home', {'revision': ${h.js(c.changeset.raw_id)}, 'f_path': ${h.js(c.file.path)}}),
             success: function(data) {
                 $('#file_authors').html(data);
                 $('#file_authors').show();
--- a/kallithea/templates/files/files_add.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/files/files_add.html	Sat May 02 21:20:43 2020 +0200
@@ -5,9 +5,9 @@
 </%block>
 
 <%block name="js_extra">
-  <script type="text/javascript" src="${h.url('/codemirror/lib/codemirror.js')}"></script>
-  <script type="text/javascript" src="${h.url('/js/codemirror_loadmode.js')}"></script>
-  <script type="text/javascript" src="${h.url('/codemirror/mode/meta.js')}"></script>
+  <script src="${h.url('/codemirror/lib/codemirror.js')}"></script>
+  <script src="${h.url('/js/codemirror_loadmode.js')}"></script>
+  <script src="${h.url('/codemirror/mode/meta.js')}"></script>
 </%block>
 <%block name="css_extra">
   <link rel="stylesheet" type="text/css" href="${h.url('/codemirror/lib/codemirror.css')}"/>
@@ -70,7 +70,7 @@
               </div>
             </div>
             ${h.end_form()}
-            <script type="text/javascript">
+            <script>'use strict';
                 $(document).ready(function(){
                     var reset_url = ${h.jshtml(h.url('files_home',repo_name=c.repo_name,revision=c.cs.raw_id,f_path=c.f_path))};
                     var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, reset_url);
@@ -107,7 +107,7 @@
                     });
 
                     // on type the new filename set mode
-                    $filename_input.keyup(function(e){
+                    $filename_input.keyup(function(){
                         var file_data = CodeMirror.getFilenameAndExt(this.value);
                         if(file_data['ext'] != null){
                             var detected_mode = CodeMirror.findModeByExtension(file_data['ext']) || CodeMirror.findModeByMIME('text/plain');
--- a/kallithea/templates/files/files_browser.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/files/files_browser.html	Sat May 02 21:20:43 2020 +0200
@@ -13,7 +13,7 @@
     %if node.is_submodule():
         <%return node.url or '#'%>
     %else:
-        <%return h.url('files_home', repo_name=c.repo_name, revision=c.changeset.raw_id, f_path=h.safe_unicode(node.path))%>
+        <%return h.url('files_home', repo_name=c.repo_name, revision=c.changeset.raw_id, f_path=node.path)%>
     %endif
 </%def>
 <%def name="_file_name(iconclass, name)">
@@ -109,7 +109,7 @@
     </div>
 </div>
 
-<script>
+<script>'use strict';
     $(document).ready(function(){
         // init node filter if we pass GET param ?search=1
         var search_GET = ${h.js(request.GET.get('search',''))};
--- a/kallithea/templates/files/files_edit.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/files/files_edit.html	Sat May 02 21:20:43 2020 +0200
@@ -5,9 +5,9 @@
 </%block>
 
 <%block name="js_extra">
-  <script type="text/javascript" src="${h.url('/codemirror/lib/codemirror.js')}"></script>
-  <script type="text/javascript" src="${h.url('/js/codemirror_loadmode.js')}"></script>
-  <script type="text/javascript" src="${h.url('/codemirror/mode/meta.js')}"></script>
+  <script src="${h.url('/codemirror/lib/codemirror.js')}"></script>
+  <script src="${h.url('/js/codemirror_loadmode.js')}"></script>
+  <script src="${h.url('/codemirror/mode/meta.js')}"></script>
 </%block>
 <%block name="css_extra">
   <link rel="stylesheet" type="text/css" href="${h.url('/codemirror/lib/codemirror.css')}"/>
@@ -59,7 +59,7 @@
                     </span>
               </div>
               <div class="panel-body no-padding">
-                <textarea id="editor" name="content" style="display:none">${h.escape(c.file.content)|n}</textarea>
+                <textarea id="editor" name="content" style="display:none">${h.escape(h.safe_str(c.file.content))|n}</textarea>
               </div>
             </div>
             <div>
@@ -77,7 +77,7 @@
     </div>
 </div>
 
-<script type="text/javascript">
+<script>'use strict';
     $(document).ready(function(){
         var reset_url = ${h.jshtml(h.url('files_home',repo_name=c.repo_name,revision=c.cs.raw_id,f_path=c.file.path))};
         var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, reset_url);
--- a/kallithea/templates/followers/followers.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/followers/followers.html	Sat May 02 21:20:43 2020 +0200
@@ -25,7 +25,7 @@
         </div>
     </div>
 </div>
-<script type="text/javascript">
+<script>'use strict';
   $(document).ready(function(){
     var $followers = $('#followers');
     $followers.on('click','.pager_link',function(e){
--- a/kallithea/templates/forks/fork.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/forks/fork.html	Sat May 02 21:20:43 2020 +0200
@@ -88,7 +88,7 @@
     </div>
     ${h.end_form()}
 </div>
-<script>
+<script>'use strict';
     $(document).ready(function(){
         $("#repo_group").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/forks/forks.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/forks/forks.html	Sat May 02 21:20:43 2020 +0200
@@ -25,7 +25,7 @@
         </div>
     </div>
 </div>
-<script type="text/javascript">
+<script>'use strict';
   $(document).ready(function(){
       var $forks = $('#forks');
       $forks.on('click','.pager_link',function(e){
--- a/kallithea/templates/index_base.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/index_base.html	Sat May 02 21:20:43 2020 +0200
@@ -44,22 +44,23 @@
         </div>
     </div>
 
-      <script>
-        var data = ${h.js(c.data)},
-            $dataTable = $("#repos_list_wrap").DataTable({
+      <script>'use strict';
+        var data = ${h.js(c.data)};
+        $("#repos_list_wrap").DataTable({
                 data: data.records,
                 columns: [
                     {data: "raw_name", visible: false, searchable: false},
                     {title: ${h.jshtml(_('Repository'))}, data: "name", orderData: [0,], render: {
-                        filter: function(data, type, row, meta) {
+                        filter: function(data, type, row) {
                             return row.just_name;
                         }
                     }},
+                    {data: "following", defaultContent: '', sortable: false},
                     {data: "desc", title: ${h.jshtml(_('Description'))}, searchable: false},
                     {data: "last_change_iso", defaultContent: '', visible: false, searchable: false},
-                    {data: "last_change", defaultContent: '', title: ${h.jshtml(_('Last Change'))}, orderData: [3,], searchable: false},
+                    {data: "last_change", defaultContent: '', title: ${h.jshtml(_('Last Change'))}, orderData: [4,], searchable: false},
                     {data: "last_rev_raw", defaultContent: '', visible: false, searchable: false},
-                    {data: "last_changeset", defaultContent: '', title: ${h.jshtml(_('Tip'))}, orderData: [5,], searchable: false},
+                    {data: "last_changeset", defaultContent: '', title: ${h.jshtml(_('Tip'))}, orderData: [6,], searchable: false},
                     {data: "owner", defaultContent: '', title: ${h.jshtml(_('Owner'))}, searchable: false},
                     {data: "atom", defaultContent: '', sortable: false}
                 ],
--- a/kallithea/templates/journal/journal.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/journal/journal.html	Sat May 02 21:20:43 2020 +0200
@@ -41,7 +41,7 @@
         </div>
     </div>
 
-<script type="text/javascript">
+<script>'use strict';
 
     $('#j_filter').click(function(){
         var $jfilter = $('#j_filter');
@@ -49,9 +49,9 @@
             $jfilter.val('');
         }
     });
-    var fix_j_filter_width = function(len){
+    function fix_j_filter_width(len){
         $('#j_filter').css('width', Math.max(80, len*6.50)+'px');
-    };
+    }
     $('#j_filter').keyup(function(){
         fix_j_filter_width($('#j_filter').val().length);
     });
@@ -72,7 +72,7 @@
 
 </script>
 
-<script type="text/javascript">
+<script>'use strict';
     $(document).ready(function(){
         var $journal = $('#journal');
         $journal.on('click','.pager_link',function(e){
--- a/kallithea/templates/login.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/login.html	Sat May 02 21:20:43 2020 +0200
@@ -64,7 +64,7 @@
             </div>
         </div>
         ${h.end_form()}
-        <script type="text/javascript">
+        <script>'use strict';
         $(document).ready(function(){
             $('#username').focus();
         });
--- a/kallithea/templates/password_reset.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/password_reset.html	Sat May 02 21:20:43 2020 +0200
@@ -7,7 +7,7 @@
 
 <%block name="js_extra">
     %if c.captcha_active:
-        <script type="text/javascript" src="https://www.google.com/recaptcha/api.js"></script>
+        <script src="https://www.google.com/recaptcha/api.js"></script>
     %endif
 </%block>
 
@@ -53,7 +53,7 @@
                 </div>
         </div>
         ${h.end_form()}
-        <script type="text/javascript">
+        <script>'use strict';
          $(document).ready(function(){
             $('#email').focus();
          });
--- a/kallithea/templates/pullrequests/pullrequest.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/pullrequests/pullrequest.html	Sat May 02 21:20:43 2020 +0200
@@ -91,12 +91,12 @@
 
 </div>
 
-<script type="text/javascript" src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
-<script type="text/javascript">
+<script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
+<script>'use strict';
   pyroutes.register('pullrequest_repo_info', ${h.js(url('pullrequest_repo_info',repo_name='%(repo_name)s'))}, ['repo_name']);
 
   var pendingajax = undefined;
-  var otherrepoChanged = function(){
+  function otherrepoChanged(){
       var $other_ref = $('#other_ref');
       $other_ref.prop('disabled', true);
       var repo_name = $('#other_repo').val();
@@ -132,9 +132,9 @@
               $other_ref.prop('disabled', false);
               loadPreview();
           });
-  };
+  }
 
-  var loadPreview = function(){
+  function loadPreview(){
       //url template
       var url = ${h.js(h.url('compare_url',
                          repo_name='__other_repo__',
@@ -162,7 +162,7 @@
           '__other_ref_name__': other_ref[2]
       }; // gather the org/other ref and repo here
 
-      for (k in rev_data){
+      for (let k in rev_data){
           url = url.replace(k,rev_data[k]);
       }
 
@@ -170,7 +170,7 @@
           pendingajax.abort();
           pendingajax = undefined;
       }
-      pendingajax = asynchtml(url, $('#pull_request_overview'), function(o){
+      pendingajax = asynchtml(url, $('#pull_request_overview'), function(){
           pendingajax = undefined;
       });
   }
@@ -186,14 +186,14 @@
           maxResults: 50,
           sortResults: branchSort
       });
-      $("#org_ref").on("change", function(e){
+      $("#org_ref").on("change", function(){
           loadPreview();
       });
 
       $("#other_repo").select2({
           dropdownAutoWidth: true
       });
-      $("#other_repo").on("change", function(e){
+      $("#other_repo").on("change", function(){
           otherrepoChanged();
       });
 
@@ -202,7 +202,7 @@
           maxResults: 50,
           sortResults: branchSort
       });
-      $("#other_ref").on("change", function(e){
+      $("#other_ref").on("change", function(){
           loadPreview();
       });
 
--- a/kallithea/templates/pullrequests/pullrequest_data.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/pullrequests/pullrequest_data.html	Sat May 02 21:20:43 2020 +0200
@@ -80,7 +80,7 @@
 </div>
 
 %if hasattr(pullrequests, 'pager'):
-    ${pullrequests.pager(**request.GET.mixed())}
+    ${pullrequests.pager()}
 %endif
 
 </%def>
--- a/kallithea/templates/pullrequests/pullrequest_show.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/pullrequests/pullrequest_show.html	Sat May 02 21:20:43 2020 +0200
@@ -300,7 +300,7 @@
                     <div class="cs_${op} clearfix">
                       <span class="node">
                           <i class="icon-diff-${op}"></i>
-                          ${h.link_to(h.safe_unicode(path), '#%s' % fid)}
+                          ${h.link_to(path, '#%s' % fid)}
                       </span>
                       <div class="changes">${h.fancy_file_stats(stats)}</div>
                     </div>
@@ -312,10 +312,10 @@
             </div>
         </div>
     </div>
-    <script>
+    <script>'use strict';
     // TODO: switch this to pyroutes
-    AJAX_COMMENT_URL = ${h.js(url('pullrequest_comment',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id))};
-    AJAX_COMMENT_DELETE_URL = ${h.js(url('pullrequest_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
+    var AJAX_COMMENT_URL = ${h.js(url('pullrequest_comment',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id))};
+    var AJAX_COMMENT_DELETE_URL = ${h.js(url('pullrequest_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
 
     pyroutes.register('pullrequest_comment', ${h.js(url('pullrequest_comment',repo_name='%(repo_name)s',pull_request_id='%(pull_request_id)s'))}, ['repo_name', 'pull_request_id']);
     pyroutes.register('pullrequest_comment_delete', ${h.js(url('pullrequest_comment_delete',repo_name='%(repo_name)s',comment_id='%(comment_id)s'))}, ['repo_name', 'comment_id']);
@@ -343,12 +343,12 @@
     ## main comment form and it status
     ${comment.comments(change_status=c.allowed_to_change_status)}
 
-    <script type="text/javascript">
+    <script>'use strict';
       $(document).ready(function(){
           PullRequestAutoComplete($('#user'));
           SimpleUserAutoComplete($('#owner'));
 
-          $('.code-difftable').on('click', '.add-bubble', function(e){
+          $('.code-difftable').on('click', '.add-bubble', function(){
               show_comment_form($(this));
           });
 
@@ -368,7 +368,7 @@
               $('#pr-form-clone').prop('disabled',!update);
           });
           var $org_review_members = $('#review_members').clone();
-          $('#pr-form-reset').click(function(e){
+          $('#pr-form-reset').click(function(){
               $('.pr-do-edit').hide();
               $('.pr-not-edit').show();
               $('#pr-form-save').prop('disabled',false);
--- a/kallithea/templates/register.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/register.html	Sat May 02 21:20:43 2020 +0200
@@ -7,7 +7,7 @@
 
 <%block name="js_extra">
     %if c.captcha_active:
-        <script type="text/javascript" src="https://www.google.com/recaptcha/api.js"></script>
+        <script src="https://www.google.com/recaptcha/api.js"></script>
     %endif
 </%block>
 
@@ -90,7 +90,7 @@
                 </div>
         </div>
         ${h.end_form()}
-        <script type="text/javascript">
+        <script>'use strict';
         $(document).ready(function(){
             $('#username').focus();
         });
--- a/kallithea/templates/summary/statistics.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/summary/statistics.html	Sat May 02 21:20:43 2020 +0200
@@ -15,9 +15,9 @@
 <%block name="head_extra">
   <link href="${h.url('atom_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s ATOM feed') % c.repo_name}" type="application/atom+xml" />
   <link href="${h.url('rss_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s RSS feed') % c.repo_name}" type="application/rss+xml" />
-  <script type="text/javascript" src="${h.url('/js/jquery.flot.js', ver=c.kallithea_version)}"></script>
-  <script type="text/javascript" src="${h.url('/js/jquery.flot.selection.js', ver=c.kallithea_version)}"></script>
-  <script type="text/javascript" src="${h.url('/js/jquery.flot.time.js', ver=c.kallithea_version)}"></script>
+  <script src="${h.url('/js/jquery.flot.js', ver=c.kallithea_version)}"></script>
+  <script src="${h.url('/js/jquery.flot.selection.js', ver=c.kallithea_version)}"></script>
+  <script src="${h.url('/js/jquery.flot.time.js', ver=c.kallithea_version)}"></script>
 </%block>
 
 <%def name="main()">
@@ -28,8 +28,8 @@
     </div>
 
     <div class="graph panel-body">
-         <div>
-         %if c.no_data:
+        <div>
+        %if not c.stats_percentage:
            ${c.no_data_msg}
            %if h.HasPermissionAny('hg.admin')('enable stats on from summary'):
                 ${h.link_to(_('Enable'),h.url('edit_repo',repo_name=c.repo_name),class_="btn btn-default btn-xs")}
@@ -51,19 +51,17 @@
     </div>
 </div>
 
-<script type="text/javascript">
+<script>'use strict';
 var data = ${h.js(c.trending_languages)};
 var total = 0;
-var no_data = true;
 var tbl = document.createElement('table');
 tbl.setAttribute('class','trending_language_tbl');
 var cnt = 0;
-for (var i=0;i<data.length;i++){
+for (let i=0;i<data.length;i++){
     total+= data[i][1].count;
 }
-for (var i=0;i<data.length;i++){
+for (let i=0;i<data.length;i++){
     cnt += 1;
-    no_data = false;
 
     var hide = cnt>2;
     var tr = document.createElement('tr');
@@ -105,7 +103,7 @@
     if(cnt == 3){
         var show_more = document.createElement('tr');
         var td = document.createElement('td');
-        lnk = document.createElement('a');
+        let lnk = document.createElement('a');
 
         lnk.href='#';
         lnk.innerHTML = ${h.jshtml(_('Show more'))};
@@ -120,7 +118,7 @@
 }
 
 </script>
-<script type="text/javascript">
+<script>'use strict';
 
 /**
  * Plots summary graph
@@ -138,17 +136,15 @@
             "to":to
         }
     };
-    for(var key in dataset){
-      var data = dataset[key].data;
+    for(let key in dataset){
+      let data = dataset[key].data;
       for(var d in data){
         data[d].time *= 1000;
       }
     }
-    for(var key in overview_dataset){
+    for(let key in overview_dataset){
       overview_dataset[key][0] *= 1000;
     }
-    var dataset = dataset;
-    var overview_dataset = [overview_dataset];
     var choiceContainer = $("#legend_choices")[0];
     var choiceContainerTable = $("#legend_choices_tables")[0];
     var $plotContainer = $('#commit_history');
@@ -160,8 +156,7 @@
         bars: {show:true, align: 'center', lineWidth: 4},
         legend: {show:true,
                 container: "#legend_container",
-                labelFormatter: function(label, series) {
-                        // series is the series object for the label
+                labelFormatter: function(label) {
                         return '<a href="javascript:void(0)"> ' + label + '</a>';
                     }
         },
@@ -209,8 +204,6 @@
 
     /**
      * generate checkboxes accordingly to data
-     * @param keys
-     * @returns
      */
     function generateCheckboxes(data) {
         //append checkboxes
@@ -259,12 +252,9 @@
 
         var data = [];
         var new_dataset = {};
-        var keys = [];
-        var max_commits = 0;
         for(var key in dataset){
-
             for(var ds in dataset[key].data){
-                commit_data = dataset[key].data[ds];
+                let commit_data = dataset[key].data[ds];
                 if (commit_data.time >= ranges.xaxis.from && commit_data.time <= ranges.xaxis.to){
                     if(new_dataset[key] === undefined){
                         new_dataset[key] = {data:[],label:key};
@@ -292,7 +282,7 @@
     * redraw using new checkbox data
     */
     function plotchoiced(e){
-        args = e.data;
+        let args = e.data;
         var cur_data = args[0];
         var cur_ranges = args[1];
 
@@ -384,7 +374,7 @@
             if (previousPoint != item.datapoint) {
                 previousPoint = item.datapoint;
 
-                var tooltip = $("#tooltip")[0];
+                let tooltip = $("#tooltip")[0];
                 if(tooltip) {
                       tooltip.parentNode.removeChild(tooltip);
                 }
@@ -421,7 +411,7 @@
             }
         }
         else {
-              var tooltip = $("#tooltip")[0];
+              let tooltip = $("#tooltip")[0];
 
               if(tooltip) {
                     tooltip.parentNode.removeChild(tooltip);
@@ -434,14 +424,14 @@
      * MAIN EXECUTION
      */
 
-    var data = getDataAccordingToRanges(initial_ranges);
+    let data = getDataAccordingToRanges(initial_ranges);
     generateCheckboxes(data);
 
     //main plot
     var plot = $.plot(plotContainer,data,plot_options);
 
     //overview
-    var overview = $.plot(overviewContainer, overview_dataset, overview_options);
+    var overview = $.plot(overviewContainer, [overview_dataset], overview_options);
 
     //show initial selection on overview
     overview.setSelection(initial_ranges);
--- a/kallithea/templates/summary/summary.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/templates/summary/summary.html	Sat May 02 21:20:43 2020 +0200
@@ -27,8 +27,8 @@
   <link href="${h.url('atom_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s ATOM feed') % c.repo_name}" type="application/atom+xml" />
   <link href="${h.url('rss_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s RSS feed') % c.repo_name}" type="application/rss+xml" />
 
-  <script>
-  redirect_hash_branch = function(){
+  <script>'use strict';
+  function redirect_hash_branch(){
     var branch = window.location.hash.replace(/^#(.*)/, '$1');
     if (branch){
       window.location = ${h.js(h.url('changelog_home',repo_name=c.repo_name,branch='__BRANCH__'))}
@@ -238,9 +238,9 @@
 </div>
 %endif
 
-<script type="text/javascript">
+<script>'use strict';
 $(document).ready(function(){
-    $('#clone-url input').click(function(e){
+    $('#clone-url input').click(function(){
         if($(this).hasClass('selected')){
             $(this).removeClass('selected');
             return ;
@@ -254,17 +254,17 @@
     var $clone_by_name = $('#clone_by_name');
     var $clone_by_id = $('#clone_by_id');
     var $clone_ssh = $('#clone_ssh');
-    $clone_url.on('click', '.btn.use-name', function(e){
+    $clone_url.on('click', '.btn.use-name', function(){
         $clone_by_name.show();
         $clone_by_id.hide();
         $clone_ssh.hide();
     });
-    $clone_url.on('click', '.btn.use-id', function(e){
+    $clone_url.on('click', '.btn.use-id', function(){
         $clone_by_id.show();
         $clone_by_name.hide();
         $clone_ssh.hide();
     });
-    $clone_url.on('click', '.btn.use-ssh', function(e){
+    $clone_url.on('click', '.btn.use-ssh', function(){
         $clone_by_id.hide();
         $clone_by_name.hide();
         $clone_ssh.show();
@@ -309,7 +309,7 @@
     $('#download_options').change(function(e){
        var new_cs = e.added
 
-       for(k in tmpl_links){
+       for(let k in tmpl_links){
            var s = $('#'+k+'_link');
            if(s){
              var title_tmpl = ${h.jshtml(_('Download %s as %s') % ('__CS_NAME__','__CS_EXT__'))};
@@ -334,20 +334,18 @@
 </script>
 
 %if c.show_stats:
-<script type="text/javascript">
+<script>'use strict';
 $(document).ready(function(){
     var data = ${h.js(c.trending_languages)};
     var total = 0;
-    var no_data = true;
     var tbl = document.createElement('table');
     tbl.setAttribute('class','table');
     var cnt = 0;
-    for (var i=0;i<data.length;i++){
+    for (let i=0;i<data.length;i++){
         total+= data[i][1].count;
     }
-    for (var i=0;i<data.length;i++){
+    for (let i=0;i<data.length;i++){
         cnt += 1;
-        no_data = false;
 
         var hide = cnt>2;
         var tr = document.createElement('tr');
@@ -395,7 +393,7 @@
         if(cnt == 3){
             var show_more = document.createElement('tr');
             var td = document.createElement('td');
-            lnk = document.createElement('a');
+            let lnk = document.createElement('a');
 
             lnk.href='#';
             lnk.innerHTML = ${h.jshtml(_('Show more'))};
--- a/kallithea/tests/__init__.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/__init__.py	Sat May 02 21:20:43 2020 +0200
@@ -17,13 +17,3 @@
 
 Refer to docs/contributing.rst for details on running the test suite.
 """
-
-import pytest
-
-
-if getattr(pytest, 'register_assert_rewrite', None):
-    # make sure that all asserts under kallithea/tests benefit from advanced
-    # assert reporting with pytest-3.0.0+, including api/api_base.py,
-    # models/common.py etc.
-    # See also: https://docs.pytest.org/en/latest/assert.html#advanced-assertion-introspection
-    pytest.register_assert_rewrite('kallithea.tests')
--- a/kallithea/tests/api/api_base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/api/api_base.py	Sat May 02 21:20:43 2020 +0200
@@ -23,8 +23,9 @@
 import mock
 import pytest
 
+from kallithea.lib import ext_json
 from kallithea.lib.auth import AuthUser
-from kallithea.lib.compat import json
+from kallithea.lib.utils2 import ascii_bytes
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.db import ChangesetStatus, PullRequest, RepoGroup, Repository, Setting, Ui, User
 from kallithea.model.gist import GistModel
@@ -34,13 +35,13 @@
 from kallithea.model.scm import ScmModel
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 API_URL = '/_admin/api'
-TEST_USER_GROUP = u'test_user_group'
-TEST_REPO_GROUP = u'test_repo_group'
+TEST_USER_GROUP = 'test_user_group'
+TEST_REPO_GROUP = 'test_repo_group'
 
 fixture = Fixture()
 
@@ -48,11 +49,10 @@
 def _build_data(apikey, method, **kw):
     """
     Builds API data with given random ID
-
-    :param random_id:
+    For convenience, the json is returned as str
     """
     random_id = random.randrange(1, 9999)
-    return random_id, json.dumps({
+    return random_id, ext_json.dumps({
         "id": random_id,
         "api_key": apikey,
         "method": method,
@@ -60,7 +60,7 @@
     })
 
 
-jsonify = lambda obj: json.loads(json.dumps(obj))
+jsonify = lambda obj: ext_json.loads(ext_json.dumps(obj))
 
 
 def crash(*args, **kwargs):
@@ -75,15 +75,15 @@
 
 ## helpers
 def make_user_group(name=TEST_USER_GROUP):
-    gr = fixture.create_user_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
+    gr = fixture.create_user_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
     UserGroupModel().add_user_to_group(user_group=gr,
-                                       user=TEST_USER_ADMIN_LOGIN)
+                                       user=base.TEST_USER_ADMIN_LOGIN)
     Session().commit()
     return gr
 
 
 def make_repo_group(name=TEST_REPO_GROUP):
-    gr = fixture.create_repo_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
+    gr = fixture.create_repo_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
     Session().commit()
     return gr
 
@@ -94,19 +94,18 @@
 
     @classmethod
     def setup_class(cls):
-        cls.usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        cls.usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         cls.apikey = cls.usr.api_key
         cls.test_user = UserModel().create_or_update(
             username='test-api',
             password='test',
             email='test@example.com',
-            firstname=u'first',
-            lastname=u'last'
+            firstname='first',
+            lastname='last'
         )
         Session().commit()
         cls.TEST_USER_LOGIN = cls.test_user.username
         cls.apikey_regular = cls.test_user.api_key
-        cls.default_user_username = User.get_default_user().username
 
     @classmethod
     def teardown_class(cls):
@@ -127,7 +126,7 @@
             'error': None,
             'result': expected
         })
-        given = json.loads(given)
+        given = ext_json.loads(given)
         assert expected == given, (expected, given)
 
     def _compare_error(self, id_, expected, given):
@@ -136,7 +135,7 @@
             'error': expected,
             'result': None
         })
-        given = json.loads(given)
+        given = ext_json.loads(given)
         assert expected == given, (expected, given)
 
     def test_Optional_object(self):
@@ -230,10 +229,10 @@
 
     def test_api_get_user(self):
         id_, params = _build_data(self.apikey, 'get_user',
-                                  userid=TEST_USER_ADMIN_LOGIN)
+                                  userid=base.TEST_USER_ADMIN_LOGIN)
         response = api_call(self, params)
 
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = usr.get_api_data()
         ret['permissions'] = AuthUser(dbuser=usr).permissions
 
@@ -252,7 +251,7 @@
         id_, params = _build_data(self.apikey, 'get_user')
         response = api_call(self, params)
 
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = usr.get_api_data()
         ret['permissions'] = AuthUser(dbuser=usr).permissions
 
@@ -281,7 +280,7 @@
     def test_api_pull_remote(self):
         # Note: pulling from local repos is a mis-feature - it will bypass access control
         # ... but ok, if the path already has been set in the database
-        repo_name = u'test_pull'
+        repo_name = 'test_pull'
         r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         # hack around that clone_uri can't be set to to a local path
         # (as shown by test_api_create_repo_clone_uri_local)
@@ -305,7 +304,7 @@
         assert pre_cached_tip != post_cached_tip
 
     def test_api_pull_fork(self):
-        fork_name = u'fork'
+        fork_name = 'fork'
         fixture.create_fork(self.REPO, fork_name)
         id_, params = _build_data(self.apikey, 'pull',
                                   repoid=fork_name,)
@@ -327,7 +326,7 @@
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_pull_custom_remote(self):
-        repo_name = u'test_pull_custom_remote'
+        repo_name = 'test_pull_custom_remote'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
         custom_remote_path = os.path.join(Ui.get_by_key('paths', '/').ui_value, self.REPO)
@@ -358,58 +357,24 @@
         expected = 'Error occurred during rescan repositories action'
         self._compare_error(id_, expected, given=response.body)
 
-    def test_api_invalidate_cache(self):
-        repo = RepoModel().get_by_repo_name(self.REPO)
-        repo.scm_instance_cached()  # seed cache
-
-        id_, params = _build_data(self.apikey, 'invalidate_cache',
-                                  repoid=self.REPO)
-        response = api_call(self, params)
-
-        expected = {
-            'msg': "Cache for repository `%s` was invalidated" % (self.REPO,),
-            'repository': self.REPO
-        }
-        self._compare_ok(id_, expected, given=response.body)
-
-    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
-    def test_api_invalidate_cache_error(self):
-        id_, params = _build_data(self.apikey, 'invalidate_cache',
-                                  repoid=self.REPO)
-        response = api_call(self, params)
-
-        expected = 'Error occurred during cache invalidation action'
-        self._compare_error(id_, expected, given=response.body)
-
-    def test_api_invalidate_cache_regular_user_no_permission(self):
-        repo = RepoModel().get_by_repo_name(self.REPO)
-        repo.scm_instance_cached() # seed cache
-
-        id_, params = _build_data(self.apikey_regular, 'invalidate_cache',
-                                  repoid=self.REPO)
-        response = api_call(self, params)
-
-        expected = "repository `%s` does not exist" % (self.REPO,)
-        self._compare_error(id_, expected, given=response.body)
-
     def test_api_create_existing_user(self):
         id_, params = _build_data(self.apikey, 'create_user',
-                                  username=TEST_USER_ADMIN_LOGIN,
+                                  username=base.TEST_USER_ADMIN_LOGIN,
                                   email='test@example.com',
                                   password='trololo')
         response = api_call(self, params)
 
-        expected = "user `%s` already exist" % TEST_USER_ADMIN_LOGIN
+        expected = "user `%s` already exist" % base.TEST_USER_ADMIN_LOGIN
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_create_user_with_existing_email(self):
         id_, params = _build_data(self.apikey, 'create_user',
-                                  username=TEST_USER_ADMIN_LOGIN + 'new',
-                                  email=TEST_USER_REGULAR_EMAIL,
+                                  username=base.TEST_USER_ADMIN_LOGIN + 'new',
+                                  email=base.TEST_USER_REGULAR_EMAIL,
                                   password='trololo')
         response = api_call(self, params)
 
-        expected = "email `%s` already exist" % TEST_USER_REGULAR_EMAIL
+        expected = "email `%s` already exist" % base.TEST_USER_REGULAR_EMAIL
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_create_user(self):
@@ -489,10 +454,10 @@
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_delete_user(self):
-        usr = UserModel().create_or_update(username=u'test_user',
-                                           password=u'qweqwe',
-                                           email=u'u232@example.com',
-                                           firstname=u'u1', lastname=u'u1')
+        usr = UserModel().create_or_update(username='test_user',
+                                           password='qweqwe',
+                                           email='u232@example.com',
+                                           firstname='u1', lastname='u1')
         Session().commit()
         username = usr.username
         email = usr.email
@@ -510,10 +475,10 @@
 
     @mock.patch.object(UserModel, 'delete', crash)
     def test_api_delete_user_when_exception_happened(self):
-        usr = UserModel().create_or_update(username=u'test_user',
-                                           password=u'qweqwe',
-                                           email=u'u232@example.com',
-                                           firstname=u'u1', lastname=u'u1')
+        usr = UserModel().create_or_update(username='test_user',
+                                           password='qweqwe',
+                                           email='u232@example.com',
+                                           firstname='u1', lastname='u1')
         Session().commit()
         username = usr.username
 
@@ -525,7 +490,7 @@
         expected = ret
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,expected', [
+    @base.parametrize('name,expected', [
         ('firstname', 'new_username'),
         ('lastname', 'new_username'),
         ('email', 'new_username'),
@@ -558,22 +523,22 @@
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_update_user_no_changed_params(self):
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
-                                  userid=TEST_USER_ADMIN_LOGIN)
+                                  userid=base.TEST_USER_ADMIN_LOGIN)
 
         response = api_call(self, params)
         ret = {
             'msg': 'updated user ID:%s %s' % (
-                usr.user_id, TEST_USER_ADMIN_LOGIN),
+                usr.user_id, base.TEST_USER_ADMIN_LOGIN),
             'user': ret
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_update_user_by_user_id(self):
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=usr.user_id)
@@ -581,7 +546,7 @@
         response = api_call(self, params)
         ret = {
             'msg': 'updated user ID:%s %s' % (
-                usr.user_id, TEST_USER_ADMIN_LOGIN),
+                usr.user_id, base.TEST_USER_ADMIN_LOGIN),
             'user': ret
         }
         expected = ret
@@ -598,7 +563,7 @@
 
     @mock.patch.object(UserModel, 'update_user', crash)
     def test_api_update_user_when_exception_happens(self):
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=usr.user_id)
@@ -610,7 +575,7 @@
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_get_repo(self):
-        new_group = u'some_new_group'
+        new_group = 'some_new_group'
         make_user_group(new_group)
         RepoModel().grant_user_group_permission(repo=self.REPO,
                                                 group_name=new_group,
@@ -619,8 +584,8 @@
         id_, params = _build_data(self.apikey, 'get_repo',
                                   repoid=self.REPO)
         response = api_call(self, params)
-        assert u"tags" not in response.json[u'result']
-        assert u'pull_requests' not in response.json[u'result']
+        assert "tags" not in response.json['result']
+        assert 'pull_requests' not in response.json['result']
 
         repo = RepoModel().get_by_repo_name(self.REPO)
         ret = repo.get_api_data()
@@ -655,10 +620,10 @@
                                   with_revision_names=True,
                                   with_pullrequests=True)
         response = api_call(self, params)
-        assert u"v0.2.0" in response.json[u'result'][u'tags']
-        assert u'pull_requests' in response.json[u'result']
+        assert "v0.2.0" in response.json['result']['tags']
+        assert 'pull_requests' in response.json['result']
 
-    @parametrize('grant_perm', [
+    @base.parametrize('grant_perm', [
         ('repository.admin'),
         ('repository.write'),
         ('repository.read'),
@@ -706,7 +671,7 @@
 
     def test_api_get_repo_by_non_admin_no_permission_to_repo(self):
         RepoModel().grant_user_permission(repo=self.REPO,
-                                          user=self.default_user_username,
+                                          user=User.DEFAULT_USER_NAME,
                                           perm='repository.none')
         try:
             RepoModel().grant_user_permission(repo=self.REPO,
@@ -721,7 +686,7 @@
             self._compare_error(id_, expected, given=response.body)
         finally:
             RepoModel().grant_user_permission(repo=self.REPO,
-                                              user=self.default_user_username,
+                                              user=User.DEFAULT_USER_NAME,
                                               perm='repository.read')
 
     def test_api_get_repo_that_doesn_not_exist(self):
@@ -755,7 +720,7 @@
 
         self._compare_ok(id_, expected, given=response.body)
 
-    @parametrize('name,ret_type', [
+    @base.parametrize('name,ret_type', [
         ('all', 'all'),
         ('dirs', 'dirs'),
         ('files', 'files'),
@@ -807,10 +772,10 @@
         response = api_call(self, params)
 
         expected = ('ret_type must be one of %s'
-                    % (','.join(['files', 'dirs', 'all'])))
+                    % (','.join(sorted(['files', 'dirs', 'all']))))
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,ret_type,grant_perm', [
+    @base.parametrize('name,ret_type,grant_perm', [
         ('all', 'all', 'repository.write'),
         ('dirs', 'dirs', 'repository.admin'),
         ('files', 'files', 'repository.read'),
@@ -838,10 +803,10 @@
             RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 
     def test_api_create_repo(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,
         )
         response = api_call(self, params)
@@ -857,18 +822,18 @@
         self._compare_ok(id_, expected, given=response.body)
         fixture.destroy_repo(repo_name)
 
-    @parametrize('repo_name', [
-        u'',
-        u'.',
-        u'..',
-        u':',
-        u'/',
-        u'<test>',
+    @base.parametrize('repo_name', [
+        '',
+        '.',
+        '..',
+        ':',
+        '/',
+        '<test>',
     ])
     def test_api_create_repo_bad_names(self, repo_name):
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,
         )
         response = api_call(self, params)
@@ -883,11 +848,11 @@
     def test_api_create_repo_clone_uri_local(self):
         # cloning from local repos was a mis-feature - it would bypass access control
         # TODO: introduce other test coverage of actual remote cloning
-        clone_uri = os.path.join(TESTS_TMP_PATH, self.REPO)
-        repo_name = u'api-repo'
+        clone_uri = os.path.join(base.TESTS_TMP_PATH, self.REPO)
+        repo_name = 'api-repo'
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,
                                   clone_uri=clone_uri,
         )
@@ -897,16 +862,16 @@
         fixture.destroy_repo(repo_name)
 
     def test_api_create_repo_and_repo_group(self):
-        repo_group_name = u'my_gr'
-        repo_name = u'%s/api-repo' % repo_group_name
+        repo_group_name = 'my_gr'
+        repo_name = '%s/api-repo' % repo_group_name
 
         # repo creation can no longer also create repo group
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,)
         response = api_call(self, params)
-        expected = u'repo group `%s` not found' % repo_group_name
+        expected = 'repo group `%s` not found' % repo_group_name
         self._compare_error(id_, expected, given=response.body)
         assert RepoModel().get_by_repo_name(repo_name) is None
 
@@ -916,7 +881,7 @@
 
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,)
         response = api_call(self, params)
         expected = {
@@ -932,9 +897,9 @@
         fixture.destroy_repo_group(repo_group_name)
 
     def test_api_create_repo_in_repo_group_without_permission(self):
-        repo_group_basename = u'api-repo-repo'
-        repo_group_name = u'%s/%s' % (TEST_REPO_GROUP, repo_group_basename)
-        repo_name = u'%s/api-repo' % repo_group_name
+        repo_group_basename = 'api-repo-repo'
+        repo_group_name = '%s/%s' % (TEST_REPO_GROUP, repo_group_basename)
+        repo_name = '%s/api-repo' % repo_group_name
 
         top_group = RepoGroup.get_by_group_name(TEST_REPO_GROUP)
         assert top_group
@@ -968,7 +933,7 @@
         fixture.destroy_repo_group(repo_group_name)
 
     def test_api_create_repo_unknown_owner(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         owner = 'i-dont-exist'
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
@@ -980,7 +945,7 @@
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_create_repo_dont_specify_owner(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         owner = 'i-dont-exist'
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
@@ -1000,7 +965,7 @@
         fixture.destroy_repo(repo_name)
 
     def test_api_create_repo_by_non_admin(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         owner = 'i-dont-exist'
         id_, params = _build_data(self.apikey_regular, 'create_repo',
                                   repo_name=repo_name,
@@ -1020,7 +985,7 @@
         fixture.destroy_repo(repo_name)
 
     def test_api_create_repo_by_non_admin_specify_owner(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         owner = 'i-dont-exist'
         id_, params = _build_data(self.apikey_regular, 'create_repo',
                                   repo_name=repo_name,
@@ -1036,7 +1001,7 @@
         repo_name = self.REPO
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,)
         response = api_call(self, params)
         expected = "repo `%s` already exist" % repo_name
@@ -1048,38 +1013,38 @@
         repo_name = '%s/%s' % (group_name, 'could-be-outside')
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,)
         response = api_call(self, params)
-        expected = u'repo group `%s` not found' % group_name
+        expected = 'repo group `%s` not found' % group_name
         self._compare_error(id_, expected, given=response.body)
         fixture.destroy_repo(repo_name)
 
     @mock.patch.object(RepoModel, 'create', crash)
     def test_api_create_repo_exception_occurred(self):
-        repo_name = u'api-repo'
+        repo_name = 'api-repo'
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
                                   repo_type=self.REPO_TYPE,)
         response = api_call(self, params)
         expected = 'failed to create repository `%s`' % repo_name
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('changing_attr,updates', [
-        ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
-        ('description', {'description': u'new description'}),
+    @base.parametrize('changing_attr,updates', [
+        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
+        ('description', {'description': 'new description'}),
         ('clone_uri', {'clone_uri': 'http://example.com/repo'}), # will fail - pulling from non-existing repo should fail
         ('clone_uri', {'clone_uri': '/repo'}), # will fail - pulling from local repo was a mis-feature - it would bypass access control
         ('clone_uri', {'clone_uri': None}),
         ('landing_rev', {'landing_rev': 'branch:master'}),
         ('enable_statistics', {'enable_statistics': True}),
         ('enable_downloads', {'enable_downloads': True}),
-        ('name', {'name': u'new_repo_name'}),
-        ('repo_group', {'group': u'test_group_for_update'}),
+        ('name', {'name': 'new_repo_name'}),
+        ('repo_group', {'group': 'test_group_for_update'}),
     ])
     def test_api_update_repo(self, changing_attr, updates):
-        repo_name = u'api_update_me'
+        repo_name = 'api_update_me'
         repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         if changing_attr == 'repo_group':
             fixture.create_repo_group(updates['group'])
@@ -1090,10 +1055,10 @@
         if changing_attr == 'name':
             repo_name = updates['name']
         if changing_attr == 'repo_group':
-            repo_name = u'/'.join([updates['group'], repo_name])
+            repo_name = '/'.join([updates['group'], repo_name])
         try:
             if changing_attr == 'clone_uri' and updates['clone_uri']:
-                expected = u'failed to update repo `%s`' % repo_name
+                expected = 'failed to update repo `%s`' % repo_name
                 self._compare_error(id_, expected, given=response.body)
             else:
                 expected = {
@@ -1106,22 +1071,22 @@
             if changing_attr == 'repo_group':
                 fixture.destroy_repo_group(updates['group'])
 
-    @parametrize('changing_attr,updates', [
-        ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
-        ('description', {'description': u'new description'}),
+    @base.parametrize('changing_attr,updates', [
+        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
+        ('description', {'description': 'new description'}),
         ('clone_uri', {'clone_uri': 'http://example.com/repo'}), # will fail - pulling from non-existing repo should fail
         ('clone_uri', {'clone_uri': '/repo'}), # will fail - pulling from local repo was a mis-feature - it would bypass access control
         ('clone_uri', {'clone_uri': None}),
         ('landing_rev', {'landing_rev': 'branch:master'}),
         ('enable_statistics', {'enable_statistics': True}),
         ('enable_downloads', {'enable_downloads': True}),
-        ('name', {'name': u'new_repo_name'}),
-        ('repo_group', {'group': u'test_group_for_update'}),
+        ('name', {'name': 'new_repo_name'}),
+        ('repo_group', {'group': 'test_group_for_update'}),
     ])
     def test_api_update_group_repo(self, changing_attr, updates):
-        group_name = u'lololo'
+        group_name = 'lololo'
         fixture.create_repo_group(group_name)
-        repo_name = u'%s/api_update_me' % group_name
+        repo_name = '%s/api_update_me' % group_name
         repo = fixture.create_repo(repo_name, repo_group=group_name, repo_type=self.REPO_TYPE)
         if changing_attr == 'repo_group':
             fixture.create_repo_group(updates['group'])
@@ -1130,12 +1095,12 @@
                                   repoid=repo_name, **updates)
         response = api_call(self, params)
         if changing_attr == 'name':
-            repo_name = u'%s/%s' % (group_name, updates['name'])
+            repo_name = '%s/%s' % (group_name, updates['name'])
         if changing_attr == 'repo_group':
-            repo_name = u'/'.join([updates['group'], repo_name.rsplit('/', 1)[-1]])
+            repo_name = '/'.join([updates['group'], repo_name.rsplit('/', 1)[-1]])
         try:
             if changing_attr == 'clone_uri' and updates['clone_uri']:
-                expected = u'failed to update repo `%s`' % repo_name
+                expected = 'failed to update repo `%s`' % repo_name
                 self._compare_error(id_, expected, given=response.body)
             else:
                 expected = {
@@ -1150,7 +1115,7 @@
         fixture.destroy_repo_group(group_name)
 
     def test_api_update_repo_repo_group_does_not_exist(self):
-        repo_name = u'admin_owned'
+        repo_name = 'admin_owned'
         fixture.create_repo(repo_name)
         updates = {'group': 'test_group_for_update'}
         id_, params = _build_data(self.apikey, 'update_repo',
@@ -1163,7 +1128,7 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_update_repo_regular_user_not_allowed(self):
-        repo_name = u'admin_owned'
+        repo_name = 'admin_owned'
         fixture.create_repo(repo_name)
         updates = {'description': 'something else'}
         id_, params = _build_data(self.apikey_regular, 'update_repo',
@@ -1177,10 +1142,10 @@
 
     @mock.patch.object(RepoModel, 'update', crash)
     def test_api_update_repo_exception_occurred(self):
-        repo_name = u'api_update_me'
+        repo_name = 'api_update_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         id_, params = _build_data(self.apikey, 'update_repo',
-                                  repoid=repo_name, owner=TEST_USER_ADMIN_LOGIN,)
+                                  repoid=repo_name, owner=base.TEST_USER_ADMIN_LOGIN,)
         response = api_call(self, params)
         try:
             expected = 'failed to update repo `%s`' % repo_name
@@ -1189,8 +1154,8 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_update_repo_regular_user_change_repo_name(self):
-        repo_name = u'admin_owned'
-        new_repo_name = u'new_repo_name'
+        repo_name = 'admin_owned'
+        new_repo_name = 'new_repo_name'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         RepoModel().grant_user_permission(repo=repo_name,
                                           user=self.TEST_USER_LOGIN,
@@ -1209,8 +1174,8 @@
             fixture.destroy_repo(new_repo_name)
 
     def test_api_update_repo_regular_user_change_repo_name_allowed(self):
-        repo_name = u'admin_owned'
-        new_repo_name = u'new_repo_name'
+        repo_name = 'admin_owned'
+        new_repo_name = 'new_repo_name'
         repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         RepoModel().grant_user_permission(repo=repo_name,
                                           user=self.TEST_USER_LOGIN,
@@ -1232,12 +1197,12 @@
             fixture.destroy_repo(new_repo_name)
 
     def test_api_update_repo_regular_user_change_owner(self):
-        repo_name = u'admin_owned'
+        repo_name = 'admin_owned'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         RepoModel().grant_user_permission(repo=repo_name,
                                           user=self.TEST_USER_LOGIN,
                                           perm='repository.admin')
-        updates = {'owner': TEST_USER_ADMIN_LOGIN}
+        updates = {'owner': base.TEST_USER_ADMIN_LOGIN}
         id_, params = _build_data(self.apikey_regular, 'update_repo',
                                   repoid=repo_name, **updates)
         response = api_call(self, params)
@@ -1248,7 +1213,7 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_delete_repo(self):
-        repo_name = u'api_delete_me'
+        repo_name = 'api_delete_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
         id_, params = _build_data(self.apikey, 'delete_repo',
@@ -1266,7 +1231,7 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_delete_repo_by_non_admin(self):
-        repo_name = u'api_delete_me'
+        repo_name = 'api_delete_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
                             cur_user=self.TEST_USER_LOGIN)
         id_, params = _build_data(self.apikey_regular, 'delete_repo',
@@ -1284,7 +1249,7 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_delete_repo_by_non_admin_no_permission(self):
-        repo_name = u'api_delete_me'
+        repo_name = 'api_delete_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         try:
             id_, params = _build_data(self.apikey_regular, 'delete_repo',
@@ -1296,7 +1261,7 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_delete_repo_exception_occurred(self):
-        repo_name = u'api_delete_me'
+        repo_name = 'api_delete_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         try:
             with mock.patch.object(RepoModel, 'delete', crash):
@@ -1310,11 +1275,11 @@
             fixture.destroy_repo(repo_name)
 
     def test_api_fork_repo(self):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         id_, params = _build_data(self.apikey, 'fork_repo',
                                   repoid=self.REPO,
                                   fork_name=fork_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
         )
         response = api_call(self, params)
 
@@ -1328,9 +1293,9 @@
         self._compare_ok(id_, expected, given=response.body)
         fixture.destroy_repo(fork_name)
 
-    @parametrize('fork_name', [
-        u'api-repo-fork',
-        u'%s/api-repo-fork' % TEST_REPO_GROUP,
+    @base.parametrize('fork_name', [
+        'api-repo-fork',
+        '%s/api-repo-fork' % TEST_REPO_GROUP,
     ])
     def test_api_fork_repo_non_admin(self, fork_name):
         id_, params = _build_data(self.apikey_regular, 'fork_repo',
@@ -1350,11 +1315,11 @@
         fixture.destroy_repo(fork_name)
 
     def test_api_fork_repo_non_admin_specify_owner(self):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         id_, params = _build_data(self.apikey_regular, 'fork_repo',
                                   repoid=self.REPO,
                                   fork_name=fork_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
         )
         response = api_call(self, params)
         expected = 'Only Kallithea admin can specify `owner` param'
@@ -1363,10 +1328,10 @@
 
     def test_api_fork_repo_non_admin_no_permission_to_fork(self):
         RepoModel().grant_user_permission(repo=self.REPO,
-                                          user=self.default_user_username,
+                                          user=User.DEFAULT_USER_NAME,
                                           perm='repository.none')
         try:
-            fork_name = u'api-repo-fork'
+            fork_name = 'api-repo-fork'
             id_, params = _build_data(self.apikey_regular, 'fork_repo',
                                       repoid=self.REPO,
                                       fork_name=fork_name,
@@ -1376,17 +1341,17 @@
             self._compare_error(id_, expected, given=response.body)
         finally:
             RepoModel().grant_user_permission(repo=self.REPO,
-                                              user=self.default_user_username,
+                                              user=User.DEFAULT_USER_NAME,
                                               perm='repository.read')
             fixture.destroy_repo(fork_name)
 
-    @parametrize('name,perm', [
+    @base.parametrize('name,perm', [
         ('read', 'repository.read'),
         ('write', 'repository.write'),
         ('admin', 'repository.admin'),
     ])
     def test_api_fork_repo_non_admin_no_create_repo_permission(self, name, perm):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         # regardless of base repository permission, forking is disallowed
         # when repository creation is disabled
         RepoModel().grant_user_permission(repo=self.REPO,
@@ -1404,7 +1369,7 @@
         fixture.destroy_repo(fork_name)
 
     def test_api_fork_repo_unknown_owner(self):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         owner = 'i-dont-exist'
         id_, params = _build_data(self.apikey, 'fork_repo',
                                   repoid=self.REPO,
@@ -1416,16 +1381,16 @@
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_fork_repo_fork_exists(self):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         fixture.create_fork(self.REPO, fork_name)
 
         try:
-            fork_name = u'api-repo-fork'
+            fork_name = 'api-repo-fork'
 
             id_, params = _build_data(self.apikey, 'fork_repo',
                                       repoid=self.REPO,
                                       fork_name=fork_name,
-                                      owner=TEST_USER_ADMIN_LOGIN,
+                                      owner=base.TEST_USER_ADMIN_LOGIN,
             )
             response = api_call(self, params)
 
@@ -1440,7 +1405,7 @@
         id_, params = _build_data(self.apikey, 'fork_repo',
                                   repoid=self.REPO,
                                   fork_name=fork_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
         )
         response = api_call(self, params)
 
@@ -1449,11 +1414,11 @@
 
     @mock.patch.object(RepoModel, 'create_fork', crash)
     def test_api_fork_repo_exception_occurred(self):
-        fork_name = u'api-repo-fork'
+        fork_name = 'api-repo-fork'
         id_, params = _build_data(self.apikey, 'fork_repo',
                                   repoid=self.REPO,
                                   fork_name=fork_name,
-                                  owner=TEST_USER_ADMIN_LOGIN,
+                                  owner=base.TEST_USER_ADMIN_LOGIN,
         )
         response = api_call(self, params)
 
@@ -1478,7 +1443,7 @@
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_get_user_groups(self):
-        gr_name = u'test_user_group2'
+        gr_name = 'test_user_group2'
         make_user_group(gr_name)
 
         try:
@@ -1486,7 +1451,7 @@
             response = api_call(self, params)
 
             expected = []
-            for gr_name in [TEST_USER_GROUP, u'test_user_group2']:
+            for gr_name in [TEST_USER_GROUP, 'test_user_group2']:
                 user_group = UserGroupModel().get_group(gr_name)
                 ret = user_group.get_api_data()
                 expected.append(ret)
@@ -1495,7 +1460,7 @@
             fixture.destroy_user_group(gr_name)
 
     def test_api_create_user_group(self):
-        group_name = u'some_new_group'
+        group_name = 'some_new_group'
         id_, params = _build_data(self.apikey, 'create_user_group',
                                   group_name=group_name)
         response = api_call(self, params)
@@ -1521,7 +1486,7 @@
 
     @mock.patch.object(UserGroupModel, 'create', crash)
     def test_api_get_user_group_exception_occurred(self):
-        group_name = u'exception_happens'
+        group_name = 'exception_happens'
         id_, params = _build_data(self.apikey, 'create_user_group',
                                   group_name=group_name)
         response = api_call(self, params)
@@ -1529,15 +1494,15 @@
         expected = 'failed to create group `%s`' % group_name
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('changing_attr,updates', [
-        ('group_name', {'group_name': u'new_group_name'}),
-        ('group_name', {'group_name': u'test_group_for_update'}),
-        ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
+    @base.parametrize('changing_attr,updates', [
+        ('group_name', {'group_name': 'new_group_name'}),
+        ('group_name', {'group_name': 'test_group_for_update'}),
+        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
         ('active', {'active': False}),
         ('active', {'active': True}),
     ])
     def test_api_update_user_group(self, changing_attr, updates):
-        gr_name = u'test_group_for_update'
+        gr_name = 'test_group_for_update'
         user_group = fixture.create_user_group(gr_name)
         try:
             id_, params = _build_data(self.apikey, 'update_user_group',
@@ -1557,7 +1522,7 @@
 
     @mock.patch.object(UserGroupModel, 'update', crash)
     def test_api_update_user_group_exception_occurred(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         fixture.create_user_group(gr_name)
         try:
             id_, params = _build_data(self.apikey, 'update_user_group',
@@ -1569,16 +1534,16 @@
             fixture.destroy_user_group(gr_name)
 
     def test_api_add_user_to_user_group(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         fixture.create_user_group(gr_name)
         try:
             id_, params = _build_data(self.apikey, 'add_user_to_user_group',
                                       usergroupid=gr_name,
-                                      userid=TEST_USER_ADMIN_LOGIN)
+                                      userid=base.TEST_USER_ADMIN_LOGIN)
             response = api_call(self, params)
             expected = {
             'msg': 'added member `%s` to user group `%s`' % (
-                    TEST_USER_ADMIN_LOGIN, gr_name),
+                    base.TEST_USER_ADMIN_LOGIN, gr_name),
             'success': True
             }
             self._compare_ok(id_, expected, given=response.body)
@@ -1588,7 +1553,7 @@
     def test_api_add_user_to_user_group_that_doesnt_exist(self):
         id_, params = _build_data(self.apikey, 'add_user_to_user_group',
                                   usergroupid='false-group',
-                                  userid=TEST_USER_ADMIN_LOGIN)
+                                  userid=base.TEST_USER_ADMIN_LOGIN)
         response = api_call(self, params)
 
         expected = 'user group `%s` does not exist' % 'false-group'
@@ -1596,12 +1561,12 @@
 
     @mock.patch.object(UserGroupModel, 'add_user_to_group', crash)
     def test_api_add_user_to_user_group_exception_occurred(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         fixture.create_user_group(gr_name)
         try:
             id_, params = _build_data(self.apikey, 'add_user_to_user_group',
                                       usergroupid=gr_name,
-                                      userid=TEST_USER_ADMIN_LOGIN)
+                                      userid=base.TEST_USER_ADMIN_LOGIN)
             response = api_call(self, params)
             expected = 'failed to add member to user group `%s`' % gr_name
             self._compare_error(id_, expected, given=response.body)
@@ -1609,18 +1574,18 @@
             fixture.destroy_user_group(gr_name)
 
     def test_api_remove_user_from_user_group(self):
-        gr_name = u'test_group_3'
+        gr_name = 'test_group_3'
         gr = fixture.create_user_group(gr_name)
-        UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN)
+        UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
         try:
             id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
                                       usergroupid=gr_name,
-                                      userid=TEST_USER_ADMIN_LOGIN)
+                                      userid=base.TEST_USER_ADMIN_LOGIN)
             response = api_call(self, params)
             expected = {
                 'msg': 'removed member `%s` from user group `%s`' % (
-                    TEST_USER_ADMIN_LOGIN, gr_name
+                    base.TEST_USER_ADMIN_LOGIN, gr_name
                 ),
                 'success': True}
             self._compare_ok(id_, expected, given=response.body)
@@ -1629,14 +1594,14 @@
 
     @mock.patch.object(UserGroupModel, 'remove_user_from_group', crash)
     def test_api_remove_user_from_user_group_exception_occurred(self):
-        gr_name = u'test_group_3'
+        gr_name = 'test_group_3'
         gr = fixture.create_user_group(gr_name)
-        UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN)
+        UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
         try:
             id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
                                       usergroupid=gr_name,
-                                      userid=TEST_USER_ADMIN_LOGIN)
+                                      userid=base.TEST_USER_ADMIN_LOGIN)
             response = api_call(self, params)
             expected = 'failed to remove member from user group `%s`' % gr_name
             self._compare_error(id_, expected, given=response.body)
@@ -1644,7 +1609,7 @@
             fixture.destroy_user_group(gr_name)
 
     def test_api_delete_user_group(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         ugroup = fixture.create_user_group(gr_name)
         gr_id = ugroup.users_group_id
         try:
@@ -1661,7 +1626,7 @@
                 fixture.destroy_user_group(gr_name)
 
     def test_api_delete_user_group_that_is_assigned(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         ugroup = fixture.create_user_group(gr_name)
         gr_id = ugroup.users_group_id
 
@@ -1679,7 +1644,7 @@
                 fixture.destroy_user_group(gr_name)
 
     def test_api_delete_user_group_exception_occurred(self):
-        gr_name = u'test_group'
+        gr_name = 'test_group'
         ugroup = fixture.create_user_group(gr_name)
         gr_id = ugroup.users_group_id
         id_, params = _build_data(self.apikey, 'delete_user_group',
@@ -1693,7 +1658,7 @@
         finally:
             fixture.destroy_user_group(gr_name)
 
-    @parametrize('name,perm', [
+    @base.parametrize('name,perm', [
         ('none', 'repository.none'),
         ('read', 'repository.read'),
         ('write', 'repository.write'),
@@ -1703,13 +1668,13 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission',
                                   repoid=self.REPO,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm)
         response = api_call(self, params)
 
         ret = {
             'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
-                perm, TEST_USER_ADMIN_LOGIN, self.REPO
+                perm, base.TEST_USER_ADMIN_LOGIN, self.REPO
             ),
             'success': True
         }
@@ -1721,7 +1686,7 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission',
                                   repoid=self.REPO,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm)
         response = api_call(self, params)
 
@@ -1734,12 +1699,12 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission',
                                   repoid=self.REPO,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm)
         response = api_call(self, params)
 
         expected = 'failed to edit permission for user: `%s` in repo: `%s`' % (
-            TEST_USER_ADMIN_LOGIN, self.REPO
+            base.TEST_USER_ADMIN_LOGIN, self.REPO
         )
         self._compare_error(id_, expected, given=response.body)
 
@@ -1747,12 +1712,12 @@
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission',
                                   repoid=self.REPO,
-                                  userid=TEST_USER_ADMIN_LOGIN, )
+                                  userid=base.TEST_USER_ADMIN_LOGIN, )
         response = api_call(self, params)
 
         expected = {
             'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
-                TEST_USER_ADMIN_LOGIN, self.REPO
+                base.TEST_USER_ADMIN_LOGIN, self.REPO
             ),
             'success': True
         }
@@ -1763,15 +1728,15 @@
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission',
                                   repoid=self.REPO,
-                                  userid=TEST_USER_ADMIN_LOGIN, )
+                                  userid=base.TEST_USER_ADMIN_LOGIN, )
         response = api_call(self, params)
 
         expected = 'failed to edit permission for user: `%s` in repo: `%s`' % (
-            TEST_USER_ADMIN_LOGIN, self.REPO
+            base.TEST_USER_ADMIN_LOGIN, self.REPO
         )
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,perm', [
+    @base.parametrize('name,perm', [
         ('none', 'repository.none'),
         ('read', 'repository.read'),
         ('write', 'repository.write'),
@@ -1853,7 +1818,7 @@
         )
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,perm,apply_to_children', [
+    @base.parametrize('name,perm,apply_to_children', [
         ('none', 'group.none', 'none'),
         ('read', 'group.read', 'none'),
         ('write', 'group.write', 'none'),
@@ -1878,20 +1843,20 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission_to_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm, apply_to_children=apply_to_children)
         response = api_call(self, params)
 
         ret = {
             'msg': 'Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % (
-                perm, apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+                perm, apply_to_children, base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
             ),
             'success': True
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
 
-    @parametrize('name,perm,apply_to_children,grant_admin,access_ok', [
+    @base.parametrize('name,perm,apply_to_children,grant_admin,access_ok', [
         ('none_fails', 'group.none', 'none', False, False),
         ('read_fails', 'group.read', 'none', False, False),
         ('write_fails', 'group.write', 'none', False, False),
@@ -1914,13 +1879,13 @@
         id_, params = _build_data(self.apikey_regular,
                                   'grant_user_permission_to_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm, apply_to_children=apply_to_children)
         response = api_call(self, params)
         if access_ok:
             ret = {
                 'msg': 'Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % (
-                    perm, apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+                    perm, apply_to_children, base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
                 ),
                 'success': True
             }
@@ -1935,7 +1900,7 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission_to_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm)
         response = api_call(self, params)
 
@@ -1948,16 +1913,16 @@
         id_, params = _build_data(self.apikey,
                                   'grant_user_permission_to_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   perm=perm)
         response = api_call(self, params)
 
         expected = 'failed to edit permission for user: `%s` in repo group: `%s`' % (
-            TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+            base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
         )
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,apply_to_children', [
+    @base.parametrize('name,apply_to_children', [
         ('none', 'none'),
         ('all', 'all'),
         ('repos', 'repos'),
@@ -1965,26 +1930,26 @@
     ])
     def test_api_revoke_user_permission_from_repo_group(self, name, apply_to_children):
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
-                                               user=TEST_USER_ADMIN_LOGIN,
+                                               user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
         Session().commit()
 
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission_from_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   apply_to_children=apply_to_children,)
         response = api_call(self, params)
 
         expected = {
             'msg': 'Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % (
-                apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+                apply_to_children, base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
             ),
             'success': True
         }
         self._compare_ok(id_, expected, given=response.body)
 
-    @parametrize('name,apply_to_children,grant_admin,access_ok', [
+    @base.parametrize('name,apply_to_children,grant_admin,access_ok', [
         ('none', 'none', False, False),
         ('all', 'all', False, False),
         ('repos', 'repos', False, False),
@@ -1999,7 +1964,7 @@
     def test_api_revoke_user_permission_from_repo_group_by_regular_user(
             self, name, apply_to_children, grant_admin, access_ok):
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
-                                               user=TEST_USER_ADMIN_LOGIN,
+                                               user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
         Session().commit()
 
@@ -2012,13 +1977,13 @@
         id_, params = _build_data(self.apikey_regular,
                                   'revoke_user_permission_from_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN,
+                                  userid=base.TEST_USER_ADMIN_LOGIN,
                                   apply_to_children=apply_to_children,)
         response = api_call(self, params)
         if access_ok:
             expected = {
                 'msg': 'Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % (
-                    apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+                    apply_to_children, base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
                 ),
                 'success': True
             }
@@ -2032,15 +1997,15 @@
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission_from_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
-                                  userid=TEST_USER_ADMIN_LOGIN, )
+                                  userid=base.TEST_USER_ADMIN_LOGIN, )
         response = api_call(self, params)
 
         expected = 'failed to edit permission for user: `%s` in repo group: `%s`' % (
-            TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+            base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
         )
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,perm,apply_to_children', [
+    @base.parametrize('name,perm,apply_to_children', [
         ('none', 'group.none', 'none'),
         ('read', 'group.read', 'none'),
         ('write', 'group.write', 'none'),
@@ -2079,7 +2044,7 @@
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
 
-    @parametrize('name,perm,apply_to_children,grant_admin,access_ok', [
+    @base.parametrize('name,perm,apply_to_children,grant_admin,access_ok', [
         ('none_fails', 'group.none', 'none', False, False),
         ('read_fails', 'group.read', 'none', False, False),
         ('write_fails', 'group.write', 'none', False, False),
@@ -2146,7 +2111,7 @@
         )
         self._compare_error(id_, expected, given=response.body)
 
-    @parametrize('name,apply_to_children', [
+    @base.parametrize('name,apply_to_children', [
         ('none', 'none'),
         ('all', 'all'),
         ('repos', 'repos'),
@@ -2172,7 +2137,7 @@
         }
         self._compare_ok(id_, expected, given=response.body)
 
-    @parametrize('name,apply_to_children,grant_admin,access_ok', [
+    @base.parametrize('name,apply_to_children,grant_admin,access_ok', [
         ('none', 'none', False, False),
         ('all', 'all', False, False),
         ('repos', 'repos', False, False),
@@ -2187,7 +2152,7 @@
     def test_api_revoke_user_group_permission_from_repo_group_by_regular_user(
             self, name, apply_to_children, grant_admin, access_ok):
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
-                                               user=TEST_USER_ADMIN_LOGIN,
+                                               user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
         Session().commit()
 
@@ -2206,7 +2171,7 @@
         if access_ok:
             expected = {
                 'msg': 'Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % (
-                    apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
+                    apply_to_children, base.TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
                 ),
                 'success': True
             }
@@ -2310,7 +2275,7 @@
 
     def test_api_get_gists_regular_user_with_different_userid(self):
         id_, params = _build_data(self.apikey_regular, 'get_gists',
-                                  userid=TEST_USER_ADMIN_LOGIN)
+                                  userid=base.TEST_USER_ADMIN_LOGIN)
         response = api_call(self, params)
         expected = 'userid is not the same as your user'
         self._compare_error(id_, expected, given=response.body)
@@ -2322,16 +2287,15 @@
                                   gist_type='public',
                                   files={'foobar': {'content': 'foo'}})
         response = api_call(self, params)
-        response_json = response.json
         expected = {
             'gist': {
-                'access_id': response_json['result']['gist']['access_id'],
-                'created_on': response_json['result']['gist']['created_on'],
+                'access_id': response.json['result']['gist']['access_id'],
+                'created_on': response.json['result']['gist']['created_on'],
                 'description': 'foobar-gist',
-                'expires': response_json['result']['gist']['expires'],
-                'gist_id': response_json['result']['gist']['gist_id'],
+                'expires': response.json['result']['gist']['expires'],
+                'gist_id': response.json['result']['gist']['gist_id'],
                 'type': 'public',
-                'url': response_json['result']['gist']['url']
+                'url': response.json['result']['gist']['url']
             },
             'msg': 'created new gist'
         }
@@ -2397,7 +2361,7 @@
         id_, params = _build_data(self.apikey, 'get_changesets',
                                   repoid=self.REPO, start=0, end=2)
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert len(result) == 3
         assert 'message' in result[0]
         assert 'added' not in result[0]
@@ -2406,7 +2370,7 @@
         id_, params = _build_data(self.apikey, 'get_changesets',
                                   repoid=self.REPO, start_date="2011-02-24T00:00:00", max_revisions=10)
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert len(result) == 10
         assert 'message' in result[0]
         assert 'added' not in result[0]
@@ -2419,7 +2383,7 @@
         id_, params = _build_data(self.apikey, 'get_changesets',
                                   repoid=self.REPO, branch_name=branch, start_date="2011-02-24T00:00:00")
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert len(result) == 5
         assert 'message' in result[0]
         assert 'added' not in result[0]
@@ -2428,7 +2392,7 @@
         id_, params = _build_data(self.apikey, 'get_changesets',
                                   repoid=self.REPO, start_date="2010-04-07T23:30:30", end_date="2010-04-08T00:31:14", with_file_list=True)
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert len(result) == 3
         assert 'message' in result[0]
         assert 'added' in result[0]
@@ -2438,7 +2402,7 @@
         id_, params = _build_data(self.apikey, 'get_changeset',
                                   repoid=self.REPO, raw_id=self.TEST_REVISION)
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert result["raw_id"] == self.TEST_REVISION
         assert "reviews" not in result
 
@@ -2448,7 +2412,7 @@
                                   repoid=self.REPO, raw_id=self.TEST_REVISION,
                                   with_reviews=True)
         response = api_call(self, params)
-        result = json.loads(response.body)["result"]
+        result = ext_json.loads(response.body)["result"]
         assert result["raw_id"] == self.TEST_REVISION
         assert "reviews" in result
         assert len(result["reviews"]) == 1
@@ -2468,7 +2432,7 @@
         id_, params = _build_data(self.apikey, 'get_changeset',
                                   repoid=self.REPO, raw_id = '7ab37bc680b4aa72c34d07b230c866c28e9fcfff')
         response = api_call(self, params)
-        expected = u'Changeset %s does not exist' % ('7ab37bc680b4aa72c34d07b230c866c28e9fcfff',)
+        expected = 'Changeset %s does not exist' % ('7ab37bc680b4aa72c34d07b230c866c28e9fcfff',)
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_get_changeset_without_permission(self):
@@ -2478,18 +2442,18 @@
         id_, params = _build_data(self.apikey_regular, 'get_changeset',
                                   repoid=self.REPO, raw_id=self.TEST_REVISION)
         response = api_call(self, params)
-        expected = u'Access denied to repo %s' % self.REPO
+        expected = 'Access denied to repo %s' % self.REPO
         self._compare_error(id_, expected, given=response.body)
 
     def test_api_get_pullrequest(self):
-        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u'get test')
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'get test')
         random_id = random.randrange(1, 9999)
-        params = json.dumps({
+        params = ascii_bytes(ext_json.dumps({
             "id": random_id,
             "api_key": self.apikey,
             "method": 'get_pullrequest',
             "args": {"pullrequest_id": pull_request_id},
-        })
+        }))
         response = api_call(self, params)
         pullrequest = PullRequest().get(pull_request_id)
         expected = {
@@ -2501,26 +2465,26 @@
             "org_repo_url": "http://localhost:80/%s" % self.REPO,
             "org_ref_parts": ["branch", "stable", self.TEST_PR_SRC],
             "other_ref_parts": ["branch", "default", self.TEST_PR_DST],
-            "comments": [{"username": TEST_USER_ADMIN_LOGIN, "text": "",
+            "comments": [{"username": base.TEST_USER_ADMIN_LOGIN, "text": "",
                          "comment_id": pullrequest.comments[0].comment_id}],
-            "owner": TEST_USER_ADMIN_LOGIN,
-            "statuses": [{"status": "under_review", "reviewer": TEST_USER_ADMIN_LOGIN, "modified_at": "2000-01-01T00:00:00"} for i in range(0, len(self.TEST_PR_REVISIONS))],
+            "owner": base.TEST_USER_ADMIN_LOGIN,
+            "statuses": [{"status": "under_review", "reviewer": base.TEST_USER_ADMIN_LOGIN, "modified_at": "2000-01-01T00:00:00"} for i in range(0, len(self.TEST_PR_REVISIONS))],
             "title": "get test",
             "revisions": self.TEST_PR_REVISIONS,
         }
         self._compare_ok(random_id, expected,
-                         given=re.sub("\d\d\d\d\-\d\d\-\d\dT\d\d\:\d\d\:\d\d",
-                                      "2000-01-01T00:00:00", response.body))
+                         given=re.sub(br"\d\d\d\d\-\d\d\-\d\dT\d\d\:\d\d\:\d\d",
+                                      b"2000-01-01T00:00:00", response.body))
 
     def test_api_close_pullrequest(self):
-        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u'close test')
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'close test')
         random_id = random.randrange(1, 9999)
-        params = json.dumps({
+        params = ascii_bytes(ext_json.dumps({
             "id": random_id,
             "api_key": self.apikey,
             "method": "comment_pullrequest",
             "args": {"pull_request_id": pull_request_id, "close_pr": True},
-        })
+        }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
         pullrequest = PullRequest().get(pull_request_id)
@@ -2529,40 +2493,40 @@
         assert pullrequest.is_closed() == True
 
     def test_api_status_pullrequest(self):
-        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u"status test")
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, "status test")
 
         random_id = random.randrange(1, 9999)
-        params = json.dumps({
+        params = ascii_bytes(ext_json.dumps({
             "id": random_id,
-            "api_key": User.get_by_username(TEST_USER_REGULAR2_LOGIN).api_key,
+            "api_key": User.get_by_username(base.TEST_USER_REGULAR2_LOGIN).api_key,
             "method": "comment_pullrequest",
             "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
-        })
+        }))
         response = api_call(self, params)
         pullrequest = PullRequest().get(pull_request_id)
         self._compare_error(random_id, "No permission to change pull request status. User needs to be admin, owner or reviewer.", given=response.body)
         assert ChangesetStatus.STATUS_UNDER_REVIEW == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
-        params = json.dumps({
+        params = ascii_bytes(ext_json.dumps({
             "id": random_id,
-            "api_key": User.get_by_username(TEST_USER_REGULAR_LOGIN).api_key,
+            "api_key": User.get_by_username(base.TEST_USER_REGULAR_LOGIN).api_key,
             "method": "comment_pullrequest",
             "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
-        })
+        }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
         pullrequest = PullRequest().get(pull_request_id)
         assert ChangesetStatus.STATUS_APPROVED == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
 
     def test_api_comment_pullrequest(self):
-        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u"comment test")
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, "comment test")
         random_id = random.randrange(1, 9999)
-        params = json.dumps({
+        params = ascii_bytes(ext_json.dumps({
             "id": random_id,
             "api_key": self.apikey,
             "method": "comment_pullrequest",
             "args": {"pull_request_id": pull_request_id, "comment_msg": "Looks good to me"},
-        })
+        }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
         pullrequest = PullRequest().get(pull_request_id)
-        assert pullrequest.comments[-1].text == u'Looks good to me'
+        assert pullrequest.comments[-1].text == 'Looks good to me'
--- a/kallithea/tests/api/test_api_git.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/api/test_api_git.py	Sat May 02 21:20:43 2020 +0200
@@ -20,9 +20,9 @@
     REPO = GIT_REPO
     REPO_TYPE = 'git'
     TEST_REVISION = GIT_TEST_REVISION
-    TEST_PR_SRC = u'c60f01b77c42dce653d6b1d3b04689862c261929'
-    TEST_PR_DST = u'10cddef6b794696066fb346434014f0a56810218'
-    TEST_PR_REVISIONS = [u'1bead5880d2dbe831762bf7fb439ba2919b75fdd',
-                         u'9bcd3ecfc8832a8cd881c1c1bbe2d13ffa9d94c7',
-                         u'283de4dfca8479875a1befb8d4059f3bbb725145',
-                         u'c60f01b77c42dce653d6b1d3b04689862c261929']
+    TEST_PR_SRC = 'c60f01b77c42dce653d6b1d3b04689862c261929'
+    TEST_PR_DST = '10cddef6b794696066fb346434014f0a56810218'
+    TEST_PR_REVISIONS = ['1bead5880d2dbe831762bf7fb439ba2919b75fdd',
+                         '9bcd3ecfc8832a8cd881c1c1bbe2d13ffa9d94c7',
+                         '283de4dfca8479875a1befb8d4059f3bbb725145',
+                         'c60f01b77c42dce653d6b1d3b04689862c261929']
--- a/kallithea/tests/api/test_api_hg.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/api/test_api_hg.py	Sat May 02 21:20:43 2020 +0200
@@ -20,10 +20,10 @@
     REPO = HG_REPO
     REPO_TYPE = 'hg'
     TEST_REVISION = HG_TEST_REVISION
-    TEST_PR_SRC = u'4f7e2131323e0749a740c0a56ab68ae9269c562a'
-    TEST_PR_DST = u'92831aebf2f8dd4879e897024b89d09af214df1c'
-    TEST_PR_REVISIONS = [u'720bbdb27665d6262b313e8a541b654d0cbd5b27',
-                         u'f41649565a9e89919a588a163e717b4084f8a3b1',
-                         u'94f45ed825a113e61af7e141f44ca578374abef0',
-                         u'fef5bfe1dc17611d5fb59a7f6f95c55c3606f933',
-                         u'4f7e2131323e0749a740c0a56ab68ae9269c562a']
+    TEST_PR_SRC = '4f7e2131323e0749a740c0a56ab68ae9269c562a'
+    TEST_PR_DST = '92831aebf2f8dd4879e897024b89d09af214df1c'
+    TEST_PR_REVISIONS = ['720bbdb27665d6262b313e8a541b654d0cbd5b27',
+                         'f41649565a9e89919a588a163e717b4084f8a3b1',
+                         '94f45ed825a113e61af7e141f44ca578374abef0',
+                         'fef5bfe1dc17611d5fb59a7f6f95c55c3606f933',
+                         '4f7e2131323e0749a740c0a56ab68ae9269c562a']
--- a/kallithea/tests/base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/base.py	Sat May 02 21:20:43 2020 +0200
@@ -22,7 +22,7 @@
 import pytest
 from webtest import TestApp
 
-from kallithea.lib.utils2 import safe_str
+from kallithea.lib.utils2 import ascii_str
 from kallithea.model.db import User
 
 
@@ -66,17 +66,17 @@
 
 IP_ADDR = '127.0.0.127'
 
-HG_REPO = u'vcs_test_hg'
-GIT_REPO = u'vcs_test_git'
+HG_REPO = 'vcs_test_hg'
+GIT_REPO = 'vcs_test_git'
 
-NEW_HG_REPO = u'vcs_test_hg_new'
-NEW_GIT_REPO = u'vcs_test_git_new'
+NEW_HG_REPO = 'vcs_test_hg_new'
+NEW_GIT_REPO = 'vcs_test_git_new'
 
-HG_FORK = u'vcs_test_hg_fork'
-GIT_FORK = u'vcs_test_git_fork'
+HG_FORK = 'vcs_test_hg_fork'
+GIT_FORK = 'vcs_test_git_fork'
 
-HG_TEST_REVISION = u"a53d9201d4bc278910d416d94941b7ea007ecd52"
-GIT_TEST_REVISION = u"7ab37bc680b4aa72c34d07b230c866c28e9fc204"
+HG_TEST_REVISION = "a53d9201d4bc278910d416d94941b7ea007ecd52"
+GIT_TEST_REVISION = "7ab37bc680b4aa72c34d07b230c866c28e9fc204"
 
 
 ## VCS
@@ -156,7 +156,7 @@
                                   'password': password,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
-        if 'Invalid username or password' in response.body:
+        if b'Invalid username or password' in response.body:
             pytest.fail('could not login using %s %s' % (username, password))
 
         assert response.status == '302 Found'
@@ -176,20 +176,19 @@
         assert user == expected_username
 
     def session_csrf_secret_token(self):
-        return self.app.get(url('session_csrf_secret_token')).body
+        return ascii_str(self.app.get(url('session_csrf_secret_token')).body)
 
     def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m):
         if 'flash' not in response.session:
-            pytest.fail(safe_str(u'msg `%s` not found - session has no flash:\n%s' % (msg, response)))
+            pytest.fail('msg `%s` not found - session has no flash:\n%s' % (msg, response))
         try:
             level, m = response.session['flash'][-1 - skip]
             if _matcher(msg, m):
                 return
         except IndexError:
             pass
-        pytest.fail(safe_str(u'msg `%s` not found in session flash (skipping %s): %s' %
-                           (msg, skip,
-                            ', '.join('`%s`' % m for level, m in response.session['flash']))))
+        pytest.fail('msg `%s` not found in session flash (skipping %s): %s' %
+                    (msg, skip, ', '.join('`%s`' % m for level, m in response.session['flash'])))
 
     def checkSessionFlashRegex(self, response, regex, skip=0):
         self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search)
--- a/kallithea/tests/conftest.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/conftest.py	Sat May 02 21:20:43 2020 +0200
@@ -45,7 +45,6 @@
             #'ssh_locale': 'C',
             'app_instance_uuid': 'test',
             'show_revision_number': 'true',
-            'beaker.cache.sql_cache_short.expire': '1',
             'session.secret': '{74e0cd75-b339-478b-b129-07dd221def1f}',
             #'i18n.lang': '',
         },
@@ -146,7 +145,7 @@
     user_model = UserModel()
 
     user_ids = []
-    user_ids.append(User.get_default_user().user_id)
+    user_ids.append(kallithea.DEFAULT_USER_ID)
     user_ids.append(User.get_by_username(TEST_USER_REGULAR_LOGIN).user_id)
 
     for user_id in user_ids:
--- a/kallithea/tests/fixture.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/fixture.py	Sat May 02 21:20:43 2020 +0200
@@ -41,8 +41,8 @@
 from kallithea.model.scm import ScmModel
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import (
-    GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH, invalidate_all_caches)
+from kallithea.tests.base import (GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH,
+                                  invalidate_all_caches)
 
 
 log = logging.getLogger(__name__)
@@ -92,8 +92,8 @@
             repo_name=None,
             repo_type='hg',
             clone_uri='',
-            repo_group=u'-1',
-            repo_description=u'DESC',
+            repo_group='-1',
+            repo_description='DESC',
             repo_private=False,
             repo_landing_rev='rev:tip',
             repo_copy_permissions=False,
@@ -113,8 +113,8 @@
         """Return form values to be validated through RepoGroupForm"""
         defs = dict(
             group_name=None,
-            group_description=u'DESC',
-            parent_group_id=u'-1',
+            group_description='DESC',
+            parent_group_id='-1',
             perms_updates=[],
             perms_new=[],
             recursive=False
@@ -128,8 +128,8 @@
             username=name,
             password='qweqwe',
             email='%s+test@example.com' % name,
-            firstname=u'TestUser',
-            lastname=u'Test',
+            firstname='TestUser',
+            lastname='Test',
             active=True,
             admin=False,
             extern_type='internal',
@@ -142,7 +142,7 @@
     def _get_user_group_create_params(self, name, **custom):
         defs = dict(
             users_group_name=name,
-            user_group_description=u'DESC',
+            user_group_description='DESC',
             users_group_active=True,
             user_group_data={},
         )
@@ -253,7 +253,7 @@
 
     def create_gist(self, **kwargs):
         form_data = {
-            'description': u'new-gist',
+            'description': 'new-gist',
             'owner': TEST_USER_ADMIN_LOGIN,
             'gist_type': Gist.GIST_PUBLIC,
             'lifetime': -1,
@@ -324,12 +324,12 @@
         return cs
 
     def review_changeset(self, repo, revision, status, author=TEST_USER_ADMIN_LOGIN):
-        comment = ChangesetCommentsModel().create(u"review comment", repo, author, revision=revision, send_email=False)
+        comment = ChangesetCommentsModel().create("review comment", repo, author, revision=revision, send_email=False)
         csm = ChangesetStatusModel().set_status(repo, ChangesetStatus.STATUS_APPROVED, author, comment, revision=revision)
         Session().commit()
         return csm
 
-    def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title=u'title'):
+    def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title='title'):
         org_ref = 'branch:stable:%s' % pr_src_rev
         other_ref = 'branch:default:%s' % pr_dst_rev
         with test_context(testcontroller.app): # needed to be able to mock request user
@@ -339,7 +339,7 @@
             request.authuser = AuthUser(dbuser=owner_user)
             # creating a PR sends a message with an absolute URL - without routing that requires mocking
             with mock.patch.object(helpers, 'url', (lambda arg, qualified=False, **kwargs: ('https://localhost' if qualified else '') + '/fake/' + arg)):
-                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, u'No description', owner_user, reviewers)
+                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, 'No description', owner_user, reviewers)
                 pull_request = cmd.execute()
             Session().commit()
         return pull_request.pull_request_id
@@ -423,7 +423,7 @@
 
 
 def failing_test_hook(ui, repo, **kwargs):
-    ui.write("failing_test_hook failed\n")
+    ui.write(b"failing_test_hook failed\n")
     return 1
 
 
@@ -432,5 +432,5 @@
 
 
 def passing_test_hook(ui, repo, **kwargs):
-    ui.write("passing_test_hook succeeded\n")
+    ui.write(b"passing_test_hook succeeded\n")
     return 0
--- a/kallithea/tests/functional/test_admin.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin.py	Sat May 02 21:20:43 2020 +0200
@@ -3,16 +3,15 @@
 import os
 from os.path import dirname
 
-from kallithea.lib.utils2 import safe_unicode
 from kallithea.model.db import UserLog
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
 FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'fixtures')
 
 
-class TestAdminController(TestController):
+class TestAdminController(base.TestController):
 
     @classmethod
     def setup_class(cls):
@@ -34,8 +33,7 @@
         with open(os.path.join(FIXTURES, 'journal_dump.csv')) as f:
             for row in csv.DictReader(f):
                 ul = UserLog()
-                for k, v in row.iteritems():
-                    v = safe_unicode(v)
+                for k, v in row.items():
                     if k == 'action_date':
                         v = strptime(v)
                     if k in ['user_id', 'repository_id']:
@@ -52,105 +50,105 @@
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index'))
+        response = self.app.get(base.url(controller='admin/admin', action='index'))
         response.mustcontain('Admin Journal')
 
     def test_filter_all_entries(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',))
+        response = self.app.get(base.url(controller='admin/admin', action='index',))
         response.mustcontain(' 2036 Entries')
 
     def test_filter_journal_filter_exact_match_on_repository(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:xxx'))
         response.mustcontain(' 3 Entries')
 
     def test_filter_journal_filter_exact_match_on_repository_CamelCase(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:XxX'))
         response.mustcontain(' 3 Entries')
 
     def test_filter_journal_filter_wildcard_on_repository(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:*test*'))
         response.mustcontain(' 862 Entries')
 
     def test_filter_journal_filter_prefix_on_repository(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:test*'))
         response.mustcontain(' 257 Entries')
 
     def test_filter_journal_filter_prefix_on_repository_CamelCase(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:Test*'))
         response.mustcontain(' 257 Entries')
 
     def test_filter_journal_filter_prefix_on_repository_and_user(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:test* AND username:demo'))
         response.mustcontain(' 130 Entries')
 
     def test_filter_journal_filter_prefix_on_repository_or_other_repo(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='repository:test* OR repository:xxx'))
         response.mustcontain(' 260 Entries')  # 257 + 3
 
     def test_filter_journal_filter_exact_match_on_username(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='username:demo'))
         response.mustcontain(' 1087 Entries')
 
     def test_filter_journal_filter_exact_match_on_username_camelCase(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='username:DemO'))
         response.mustcontain(' 1087 Entries')
 
     def test_filter_journal_filter_wildcard_on_username(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='username:*test*'))
         response.mustcontain(' 100 Entries')
 
     def test_filter_journal_filter_prefix_on_username(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='username:demo*'))
         response.mustcontain(' 1101 Entries')
 
     def test_filter_journal_filter_prefix_on_user_or_other_user(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='username:demo OR username:volcan'))
         response.mustcontain(' 1095 Entries')  # 1087 + 8
 
     def test_filter_journal_filter_wildcard_on_action(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='action:*pull_request*'))
         response.mustcontain(' 187 Entries')
 
     def test_filter_journal_filter_on_date(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='date:20121010'))
         response.mustcontain(' 47 Entries')
 
     def test_filter_journal_filter_on_date_2(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter='date:20121020'))
         response.mustcontain(' 17 Entries')
 
-    @parametrize('filter,hit', [
+    @base.parametrize('filter,hit', [
         #### "repository:" filtering
         # "/" is used for grouping
         ('repository:group/test', 4),
@@ -189,7 +187,7 @@
     def test_filter_journal_filter_tokenization(self, filter, hit):
         self.log_user()
 
-        response = self.app.get(url(controller='admin/admin', action='index',
+        response = self.app.get(base.url(controller='admin/admin', action='index',
                                     filter=filter))
         if hit != 1:
             response.mustcontain(' %s Entries' % hit)
--- a/kallithea/tests/functional/test_admin_auth_settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_auth_settings.py	Sat May 02 21:20:43 2020 +0200
@@ -1,10 +1,10 @@
 from kallithea.model.db import Setting
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestAuthSettingsController(TestController):
+class TestAuthSettingsController(base.TestController):
     def _enable_plugins(self, plugins_list):
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
         params={'auth_plugins': plugins_list, '_session_csrf_secret_token': self.session_csrf_secret_token()}
 
@@ -17,16 +17,16 @@
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='admin/auth_settings',
+        response = self.app.get(base.url(controller='admin/auth_settings',
                                     action='index'))
         response.mustcontain('Authentication Plugins')
 
-    @skipif(not ldap_lib_installed, reason='skipping due to missing ldap lib')
+    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
     def test_ldap_save_settings(self):
         self.log_user()
 
         params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_ldap')
-        params.update({'auth_ldap_host': u'dc.example.com',
+        params.update({'auth_ldap_host': 'dc.example.com',
                        'auth_ldap_port': '999',
                        'auth_ldap_tls_kind': 'PLAIN',
                        'auth_ldap_tls_reqcert': 'NEVER',
@@ -41,16 +41,16 @@
                        'auth_ldap_attr_lastname': 'tester',
                        'auth_ldap_attr_email': 'test@example.com'})
 
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
         new_settings = Setting.get_auth_settings()
-        assert new_settings['auth_ldap_host'] == u'dc.example.com', 'fail db write compare'
+        assert new_settings['auth_ldap_host'] == 'dc.example.com', 'fail db write compare'
 
-    @skipif(not ldap_lib_installed, reason='skipping due to missing ldap lib')
+    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
     def test_ldap_error_form_wrong_port_number(self):
         self.log_user()
 
@@ -68,7 +68,7 @@
                        'auth_ldap_attr_firstname': '',
                        'auth_ldap_attr_lastname': '',
                        'auth_ldap_attr_email': ''})
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
@@ -76,7 +76,7 @@
         response.mustcontain("""<span class="error-message">"""
                              """Please enter a number</span>""")
 
-    @skipif(not ldap_lib_installed, reason='skipping due to missing ldap lib')
+    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
     def test_ldap_error_form(self):
         self.log_user()
 
@@ -95,7 +95,7 @@
                        'auth_ldap_attr_lastname': '',
                        'auth_ldap_attr_email': ''})
 
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
@@ -115,7 +115,7 @@
         params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_container')
         params.update(settings)
 
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
@@ -124,7 +124,7 @@
 
     def _container_auth_verify_login(self, resulting_username, **get_kwargs):
         response = self.app.get(
-            url=url(controller='admin/my_account', action='my_account'),
+            url=base.url(controller='admin/my_account', action='my_account'),
             **get_kwargs
         )
         response.mustcontain('My Account %s' % resulting_username)
@@ -153,7 +153,7 @@
             auth_container_clean_username='False',
         )
         response = self.app.get(
-            url=url(controller='admin/my_account', action='my_account'),
+            url=base.url(controller='admin/my_account', action='my_account'),
             extra_environ={'THE_USER_NAME': 'johnd',
                            'THE_USER_EMAIL': 'john@example.org',
                            'THE_USER_FIRSTNAME': 'John',
@@ -216,10 +216,10 @@
             auth_container_clean_username='True',
         )
         response = self.app.get(
-            url=url(controller='admin/my_account', action='my_account'),
+            url=base.url(controller='admin/my_account', action='my_account'),
             extra_environ={'REMOTE_USER': 'john'},
         )
-        assert 'Log Out' not in response.normal_body
+        assert b'Log Out' not in response.normal_body
 
     def test_crowd_save_settings(self):
         self.log_user()
@@ -232,16 +232,16 @@
                        'auth_crowd_method': 'https',
                        'auth_crowd_app_name': 'xyzzy'})
 
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
         new_settings = Setting.get_auth_settings()
-        assert new_settings['auth_crowd_host'] == u'hostname', 'fail db write compare'
+        assert new_settings['auth_crowd_host'] == 'hostname', 'fail db write compare'
 
-    @skipif(not pam_lib_installed, reason='skipping due to missing pam lib')
+    @base.skipif(not base.pam_lib_installed, reason='skipping due to missing pam lib')
     def test_pam_save_settings(self):
         self.log_user()
 
@@ -249,11 +249,11 @@
         params.update({'auth_pam_service': 'kallithea',
                        'auth_pam_gecos': '^foo-.*'})
 
-        test_url = url(controller='admin/auth_settings',
+        test_url = base.url(controller='admin/auth_settings',
                        action='auth_settings')
 
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
         new_settings = Setting.get_auth_settings()
-        assert new_settings['auth_pam_service'] == u'kallithea', 'fail db write compare'
+        assert new_settings['auth_pam_service'] == 'kallithea', 'fail db write compare'
--- a/kallithea/tests/functional/test_admin_defaults.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_defaults.py	Sat May 02 21:20:43 2020 +0200
@@ -1,12 +1,12 @@
 from kallithea.model.db import Setting
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestDefaultsController(TestController):
+class TestDefaultsController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('defaults'))
+        response = self.app.get(base.url('defaults'))
         response.mustcontain('default_repo_private')
         response.mustcontain('default_repo_enable_statistics')
         response.mustcontain('default_repo_enable_downloads')
@@ -20,7 +20,7 @@
             'default_repo_type': 'hg',
             '_session_csrf_secret_token': self.session_csrf_secret_token(),
         }
-        response = self.app.post(url('defaults_update', id='default'), params=params)
+        response = self.app.post(base.url('defaults_update', id='default'), params=params)
         self.checkSessionFlash(response, 'Default settings updated successfully')
 
         params.pop('_session_csrf_secret_token')
@@ -36,7 +36,7 @@
             'default_repo_type': 'git',
             '_session_csrf_secret_token': self.session_csrf_secret_token(),
         }
-        response = self.app.post(url('defaults_update', id='default'), params=params)
+        response = self.app.post(base.url('defaults_update', id='default'), params=params)
         self.checkSessionFlash(response, 'Default settings updated successfully')
 
         params.pop('_session_csrf_secret_token')
--- a/kallithea/tests/functional/test_admin_gists.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_gists.py	Sat May 02 21:20:43 2020 +0200
@@ -1,24 +1,24 @@
 from kallithea.model.db import Gist, User
 from kallithea.model.gist import GistModel
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
 def _create_gist(f_name, content='some gist', lifetime=-1,
-                 description=u'gist-desc', gist_type='public',
-                 owner=TEST_USER_ADMIN_LOGIN):
+                 description='gist-desc', gist_type='public',
+                 owner=base.TEST_USER_ADMIN_LOGIN):
     gist_mapping = {
         f_name: {'content': content}
     }
     owner = User.get_by_username(owner)
-    gist = GistModel().create(description, owner=owner, ip_addr=IP_ADDR,
+    gist = GistModel().create(description, owner=owner, ip_addr=base.IP_ADDR,
                        gist_mapping=gist_mapping, gist_type=gist_type,
                        lifetime=lifetime)
     Session().commit()
     return gist
 
 
-class TestGistsController(TestController):
+class TestGistsController(base.TestController):
 
     def teardown_method(self, method):
         for g in Gist.query():
@@ -27,15 +27,15 @@
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('gists'))
+        response = self.app.get(base.url('gists'))
         # Test response...
         response.mustcontain('There are no gists yet')
 
         g1 = _create_gist('gist1').gist_access_id
         g2 = _create_gist('gist2', lifetime=1400).gist_access_id
-        g3 = _create_gist('gist3', description=u'gist3-desc').gist_access_id
+        g3 = _create_gist('gist3', description='gist3-desc').gist_access_id
         g4 = _create_gist('gist4', gist_type='private').gist_access_id
-        response = self.app.get(url('gists'))
+        response = self.app.get(base.url('gists'))
         # Test response...
         response.mustcontain('gist: %s' % g1)
         response.mustcontain('gist: %s' % g2)
@@ -47,7 +47,7 @@
     def test_index_private_gists(self):
         self.log_user()
         gist = _create_gist('gist5', gist_type='private')
-        response = self.app.get(url('gists', private=1))
+        response = self.app.get(base.url('gists', private=1))
         # Test response...
 
         # and privates
@@ -55,7 +55,7 @@
 
     def test_create_missing_description(self):
         self.log_user()
-        response = self.app.post(url('gists'),
+        response = self.app.post(base.url('gists'),
                                  params={'lifetime': -1, '_session_csrf_secret_token': self.session_csrf_secret_token()},
                                  status=200)
 
@@ -63,7 +63,7 @@
 
     def test_create(self):
         self.log_user()
-        response = self.app.post(url('gists'),
+        response = self.app.post(base.url('gists'),
                                  params={'lifetime': -1,
                                          'content': 'gist test',
                                          'filename': 'foo',
@@ -77,7 +77,7 @@
 
     def test_create_with_path_with_dirs(self):
         self.log_user()
-        response = self.app.post(url('gists'),
+        response = self.app.post(base.url('gists'),
                                  params={'lifetime': -1,
                                          'content': 'gist test',
                                          'filename': '/home/foo',
@@ -92,11 +92,11 @@
         gist.gist_expires = 0  # 1970
         Session().commit()
 
-        response = self.app.get(url('gist', gist_id=gist.gist_access_id), status=404)
+        response = self.app.get(base.url('gist', gist_id=gist.gist_access_id), status=404)
 
     def test_create_private(self):
         self.log_user()
-        response = self.app.post(url('gists'),
+        response = self.app.post(base.url('gists'),
                                  params={'lifetime': -1,
                                          'content': 'private gist test',
                                          'filename': 'private-foo',
@@ -110,7 +110,7 @@
 
     def test_create_with_description(self):
         self.log_user()
-        response = self.app.post(url('gists'),
+        response = self.app.post(base.url('gists'),
                                  params={'lifetime': -1,
                                          'content': 'gist test',
                                          'filename': 'foo-desc',
@@ -126,46 +126,46 @@
 
     def test_new(self):
         self.log_user()
-        response = self.app.get(url('new_gist'))
+        response = self.app.get(base.url('new_gist'))
 
     def test_delete(self):
         self.log_user()
         gist = _create_gist('delete-me')
-        response = self.app.post(url('gist_delete', gist_id=gist.gist_id),
+        response = self.app.post(base.url('gist_delete', gist_id=gist.gist_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
     def test_delete_normal_user_his_gist(self):
-        self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-        gist = _create_gist('delete-me', owner=TEST_USER_REGULAR_LOGIN)
-        response = self.app.post(url('gist_delete', gist_id=gist.gist_id),
+        self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
+        gist = _create_gist('delete-me', owner=base.TEST_USER_REGULAR_LOGIN)
+        response = self.app.post(base.url('gist_delete', gist_id=gist.gist_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
     def test_delete_normal_user_not_his_own_gist(self):
-        self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
+        self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
         gist = _create_gist('delete-me')
-        response = self.app.post(url('gist_delete', gist_id=gist.gist_id), status=403,
+        response = self.app.post(base.url('gist_delete', gist_id=gist.gist_id), status=403,
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
     def test_show(self):
         gist = _create_gist('gist-show-me')
-        response = self.app.get(url('gist', gist_id=gist.gist_access_id))
+        response = self.app.get(base.url('gist', gist_id=gist.gist_access_id))
         response.mustcontain('added file: gist-show-me<')
-        response.mustcontain('%s - created' % TEST_USER_ADMIN_LOGIN)
+        response.mustcontain('%s - created' % base.TEST_USER_ADMIN_LOGIN)
         response.mustcontain('gist-desc')
         response.mustcontain('<div class="label label-success">Public Gist</div>')
 
     def test_show_as_raw(self):
         gist = _create_gist('gist-show-me', content='GIST CONTENT')
-        response = self.app.get(url('formatted_gist',
+        response = self.app.get(base.url('formatted_gist',
                                     gist_id=gist.gist_access_id, format='raw'))
-        assert response.body == 'GIST CONTENT'
+        assert response.body == b'GIST CONTENT'
 
     def test_show_as_raw_individual_file(self):
         gist = _create_gist('gist-show-me-raw', content='GIST BODY')
-        response = self.app.get(url('formatted_gist_file',
+        response = self.app.get(base.url('formatted_gist_file',
                                     gist_id=gist.gist_access_id, format='raw',
                                     revision='tip', f_path='gist-show-me-raw'))
-        assert response.body == 'GIST BODY'
+        assert response.body == b'GIST BODY'
 
     def test_edit(self):
-        response = self.app.get(url('edit_gist', gist_id=1))
+        response = self.app.get(base.url('edit_gist', gist_id=1))
--- a/kallithea/tests/functional/test_admin_permissions.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_permissions.py	Sat May 02 21:20:43 2020 +0200
@@ -1,81 +1,82 @@
+import kallithea
 from kallithea.model.db import User, UserIpMap
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestAdminPermissionsController(TestController):
+class TestAdminPermissionsController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('admin_permissions'))
+        response = self.app.get(base.url('admin_permissions'))
         # Test response...
 
     def test_index_ips(self):
         self.log_user()
-        response = self.app.get(url('admin_permissions_ips'))
+        response = self.app.get(base.url('admin_permissions_ips'))
         # Test response...
         response.mustcontain('All IP addresses are allowed')
 
     def test_add_delete_ips(self, auto_clear_ip_permissions):
         self.log_user()
-        default_user_id = User.get_default_user().user_id
+        default_user_id = kallithea.DEFAULT_USER_ID
 
         # Add IP and verify it is shown in UI and both gives access and rejects
 
-        response = self.app.post(url('edit_user_ips_update', id=default_user_id),
+        response = self.app.post(base.url('edit_user_ips_update', id=default_user_id),
                                  params=dict(new_ip='0.0.0.0/24',
                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
-        invalidate_all_caches()
-        response = self.app.get(url('admin_permissions_ips'),
+        base.invalidate_all_caches()
+        response = self.app.get(base.url('admin_permissions_ips'),
                                 extra_environ={'REMOTE_ADDR': '0.0.0.1'})
         response.mustcontain('0.0.0.0/24')
         response.mustcontain('0.0.0.0 - 0.0.0.255')
 
-        response = self.app.get(url('admin_permissions_ips'),
+        response = self.app.get(base.url('admin_permissions_ips'),
                                 extra_environ={'REMOTE_ADDR': '0.0.1.1'}, status=403)
 
         # Add another IP and verify previously rejected now works
 
-        response = self.app.post(url('edit_user_ips_update', id=default_user_id),
+        response = self.app.post(base.url('edit_user_ips_update', id=default_user_id),
                                  params=dict(new_ip='0.0.1.0/24',
                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
-        invalidate_all_caches()
+        base.invalidate_all_caches()
 
-        response = self.app.get(url('admin_permissions_ips'),
+        response = self.app.get(base.url('admin_permissions_ips'),
                                 extra_environ={'REMOTE_ADDR': '0.0.1.1'})
 
         # Delete latest IP and verify same IP is rejected again
 
         x = UserIpMap.query().filter_by(ip_addr='0.0.1.0/24').first()
-        response = self.app.post(url('edit_user_ips_delete', id=default_user_id),
+        response = self.app.post(base.url('edit_user_ips_delete', id=default_user_id),
                                  params=dict(del_ip_id=x.ip_id,
                                              _session_csrf_secret_token=self.session_csrf_secret_token()))
-        invalidate_all_caches()
+        base.invalidate_all_caches()
 
-        response = self.app.get(url('admin_permissions_ips'),
+        response = self.app.get(base.url('admin_permissions_ips'),
                                 extra_environ={'REMOTE_ADDR': '0.0.1.1'}, status=403)
 
         # Delete first IP and verify unlimited access again
 
         x = UserIpMap.query().filter_by(ip_addr='0.0.0.0/24').first()
-        response = self.app.post(url('edit_user_ips_delete', id=default_user_id),
+        response = self.app.post(base.url('edit_user_ips_delete', id=default_user_id),
                                  params=dict(del_ip_id=x.ip_id,
                                              _session_csrf_secret_token=self.session_csrf_secret_token()))
-        invalidate_all_caches()
+        base.invalidate_all_caches()
 
-        response = self.app.get(url('admin_permissions_ips'),
+        response = self.app.get(base.url('admin_permissions_ips'),
                                 extra_environ={'REMOTE_ADDR': '0.0.1.1'})
 
     def test_index_overview(self):
         self.log_user()
-        response = self.app.get(url('admin_permissions_perms'))
+        response = self.app.get(base.url('admin_permissions_perms'))
         # Test response...
 
     def test_edit_permissions_permissions(self):
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         # Test unauthenticated access - it will redirect to login page
         response = self.app.post(
-            url('edit_repo_perms_update', repo_name=HG_REPO),
+            base.url('edit_repo_perms_update', repo_name=base.HG_REPO),
             params=dict(
                 perm_new_member_1='repository.read',
                 perm_new_member_name_1=user.username,
@@ -83,24 +84,24 @@
                 _session_csrf_secret_token=self.session_csrf_secret_token()),
             status=302)
 
-        assert not response.location.endswith(url('edit_repo_perms_update', repo_name=HG_REPO))
-        assert response.location.endswith(url('login_home', came_from=url('edit_repo_perms_update', repo_name=HG_REPO)))
+        assert not response.location.endswith(base.url('edit_repo_perms_update', repo_name=base.HG_REPO))
+        assert response.location.endswith(base.url('login_home', came_from=base.url('edit_repo_perms_update', repo_name=base.HG_REPO)))
 
         response = self.app.post(
-            url('edit_repo_perms_revoke', repo_name=HG_REPO),
+            base.url('edit_repo_perms_revoke', repo_name=base.HG_REPO),
             params=dict(
                 obj_type='user',
                 user_id=user.user_id,
                 _session_csrf_secret_token=self.session_csrf_secret_token()),
             status=302)
 
-        assert response.location.endswith(url('login_home', came_from=url('edit_repo_perms_revoke', repo_name=HG_REPO)))
+        assert response.location.endswith(base.url('login_home', came_from=base.url('edit_repo_perms_revoke', repo_name=base.HG_REPO)))
 
         # Test authenticated access
         self.log_user()
 
         response = self.app.post(
-            url('edit_repo_perms_update', repo_name=HG_REPO),
+            base.url('edit_repo_perms_update', repo_name=base.HG_REPO),
             params=dict(
                 perm_new_member_1='repository.read',
                 perm_new_member_name_1=user.username,
@@ -108,10 +109,10 @@
                 _session_csrf_secret_token=self.session_csrf_secret_token()),
             status=302)
 
-        assert response.location.endswith(url('edit_repo_perms_update', repo_name=HG_REPO))
+        assert response.location.endswith(base.url('edit_repo_perms_update', repo_name=base.HG_REPO))
 
         response = self.app.post(
-            url('edit_repo_perms_revoke', repo_name=HG_REPO),
+            base.url('edit_repo_perms_revoke', repo_name=base.HG_REPO),
             params=dict(
                 obj_type='user',
                 user_id=user.user_id,
--- a/kallithea/tests/functional/test_admin_repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -11,7 +11,7 @@
 
     def test_case_insensitivity(self):
         self.log_user()
-        group_name = u'newgroup'
+        group_name = 'newgroup'
         response = self.app.post(url('repos_groups'),
                                  fixture._get_repo_group_create_params(group_name=group_name,
                                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
--- a/kallithea/tests/functional/test_admin_repos.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_repos.py	Sat May 02 21:20:43 2020 +0200
@@ -1,19 +1,19 @@
 # -*- coding: utf-8 -*-
 
 import os
-import urllib
+import urllib.parse
 
 import mock
 import pytest
 
 from kallithea.lib import vcs
-from kallithea.lib.utils2 import safe_str, safe_unicode
-from kallithea.model.db import Permission, RepoGroup, Repository, Ui, User, UserRepoToPerm
+from kallithea.model import db
+from kallithea.model.db import Permission, Repository, Ui, User, UserRepoToPerm
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture, error_function
 
 
@@ -29,7 +29,7 @@
     return perm
 
 
-class _BaseTestCase(TestController):
+class _BaseTestCase(base.TestController):
     """
     Write all tests here
     """
@@ -41,21 +41,21 @@
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('repos'))
+        response = self.app.get(base.url('repos'))
 
     def test_create(self):
         self.log_user()
         repo_name = self.NEW_REPO
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
                                                 repo_description=description,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name))
-        assert response.json == {u'result': True}
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
+        assert response.json == {'result': True}
         self.checkSessionFlash(response,
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name, repo_name))
@@ -68,13 +68,13 @@
         assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name))
         response.mustcontain(repo_name)
         response.mustcontain(self.REPO_TYPE)
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
@@ -84,8 +84,8 @@
     def test_case_insensitivity(self):
         self.log_user()
         repo_name = self.NEW_REPO
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                                  fixture._get_repo_create_params(repo_private=False,
                                                                  repo_name=repo_name,
                                                                  repo_type=self.REPO_TYPE,
@@ -93,7 +93,7 @@
                                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
         # try to create repo with swapped case
         swapped_repo_name = repo_name.swapcase()
-        response = self.app.post(url('repos'),
+        response = self.app.post(base.url('repos'),
                                  fixture._get_repo_create_params(repo_private=False,
                                                                  repo_name=swapped_repo_name,
                                                                  repo_type=self.REPO_TYPE,
@@ -108,16 +108,16 @@
         self.log_user()
 
         ## create GROUP
-        group_name = u'sometest_%s' % self.REPO_TYPE
+        group_name = 'sometest_%s' % self.REPO_TYPE
         gr = RepoGroupModel().create(group_name=group_name,
-                                     group_description=u'test',
-                                     owner=TEST_USER_ADMIN_LOGIN)
+                                     group_description='test',
+                                     owner=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
 
-        repo_name = u'ingroup'
-        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        repo_name = 'ingroup'
+        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -125,8 +125,8 @@
                                                 repo_group=gr.group_id,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
-        assert response.json == {u'result': True}
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
+        assert response.json == {'result': True}
         self.checkSessionFlash(response,
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
@@ -139,7 +139,7 @@
         assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name_full))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
         response.mustcontain(repo_name_full)
         response.mustcontain(self.REPO_TYPE)
 
@@ -149,7 +149,7 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
             Session().commit()
@@ -160,41 +160,41 @@
         Session().commit()
 
     def test_create_in_group_without_needed_permissions(self):
-        usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
         # avoid spurious RepoGroup DetachedInstanceError ...
         session_csrf_secret_token = self.session_csrf_secret_token()
         # revoke
         user_model = UserModel()
         # disable fork and create on default user
-        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
-        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
-        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
-        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
+        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.create.repository')
+        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.create.none')
+        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.fork.repository')
+        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.fork.none')
 
         # disable on regular user
-        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
-        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
-        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
-        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
+        user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
+        user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.none')
+        user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
+        user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
         Session().commit()
 
         ## create GROUP
-        group_name = u'reg_sometest_%s' % self.REPO_TYPE
+        group_name = 'reg_sometest_%s' % self.REPO_TYPE
         gr = RepoGroupModel().create(group_name=group_name,
-                                     group_description=u'test',
-                                     owner=TEST_USER_ADMIN_LOGIN)
+                                     group_description='test',
+                                     owner=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
 
-        group_name_allowed = u'reg_sometest_allowed_%s' % self.REPO_TYPE
+        group_name_allowed = 'reg_sometest_allowed_%s' % self.REPO_TYPE
         gr_allowed = RepoGroupModel().create(group_name=group_name_allowed,
-                                     group_description=u'test',
-                                     owner=TEST_USER_REGULAR_LOGIN)
+                                     group_description='test',
+                                     owner=base.TEST_USER_REGULAR_LOGIN)
         Session().commit()
 
-        repo_name = u'ingroup'
-        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        repo_name = 'ingroup'
+        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -205,10 +205,10 @@
         response.mustcontain('Invalid value')
 
         # user is allowed to create in this group
-        repo_name = u'ingroup'
-        repo_name_full = RepoGroup.url_sep().join([group_name_allowed, repo_name])
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        repo_name = 'ingroup'
+        repo_name_full = db.URL_SEP.join([group_name_allowed, repo_name])
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -217,8 +217,8 @@
                                                 _session_csrf_secret_token=session_csrf_secret_token))
 
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
-        assert response.json == {u'result': True}
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
+        assert response.json == {'result': True}
         self.checkSessionFlash(response,
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
@@ -231,7 +231,7 @@
         assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name_full))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
         response.mustcontain(repo_name_full)
         response.mustcontain(self.REPO_TYPE)
 
@@ -241,7 +241,7 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
             Session().commit()
@@ -256,20 +256,20 @@
         self.log_user()
 
         ## create GROUP
-        group_name = u'sometest_%s' % self.REPO_TYPE
+        group_name = 'sometest_%s' % self.REPO_TYPE
         gr = RepoGroupModel().create(group_name=group_name,
-                                     group_description=u'test',
-                                     owner=TEST_USER_ADMIN_LOGIN)
+                                     group_description='test',
+                                     owner=base.TEST_USER_ADMIN_LOGIN)
         perm = Permission.get_by_key('repository.write')
-        RepoGroupModel().grant_user_permission(gr, TEST_USER_REGULAR_LOGIN, perm)
+        RepoGroupModel().grant_user_permission(gr, base.TEST_USER_REGULAR_LOGIN, perm)
 
         ## add repo permissions
         Session().commit()
 
-        repo_name = u'ingroup_inherited_%s' % self.REPO_TYPE
-        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        repo_name = 'ingroup_inherited_%s' % self.REPO_TYPE
+        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -279,7 +279,7 @@
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
 
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
         self.checkSessionFlash(response,
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
@@ -292,13 +292,13 @@
         assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name_full))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
         response.mustcontain(repo_name_full)
         response.mustcontain(self.REPO_TYPE)
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
             Session().commit()
@@ -309,7 +309,7 @@
             .filter(UserRepoToPerm.repository_id == new_repo_id).all()
         assert len(inherited_perms) == 2
 
-        assert TEST_USER_REGULAR_LOGIN in [x.user.username
+        assert base.TEST_USER_REGULAR_LOGIN in [x.user.username
                                                     for x in inherited_perms]
         assert 'repository.write' in [x.permission.permission_name
                                                for x in inherited_perms]
@@ -321,8 +321,8 @@
     def test_create_remote_repo_wrong_clone_uri(self):
         self.log_user()
         repo_name = self.NEW_REPO
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -334,8 +334,8 @@
     def test_create_remote_repo_wrong_clone_uri_hg_svn(self):
         self.log_user()
         repo_name = self.NEW_REPO
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -346,16 +346,16 @@
 
     def test_delete(self):
         self.log_user()
-        repo_name = u'vcs_test_new_to_delete_%s' % self.REPO_TYPE
-        description = u'description for newly created repo'
-        response = self.app.post(url('repos'),
+        repo_name = 'vcs_test_new_to_delete_%s' % self.REPO_TYPE
+        description = 'description for newly created repo'
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_type=self.REPO_TYPE,
                                                 repo_name=repo_name,
                                                 repo_description=description,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name))
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
         self.checkSessionFlash(response,
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name, repo_name))
@@ -367,17 +367,17 @@
         assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name))
         response.mustcontain(repo_name)
         response.mustcontain(self.REPO_TYPE)
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
-        response = self.app.post(url('delete_repo', repo_name=repo_name),
+        response = self.app.post(base.url('delete_repo', repo_name=repo_name),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
@@ -395,67 +395,65 @@
     def test_delete_non_ascii(self):
         self.log_user()
         non_ascii = "ąęł"
-        repo_name = "%s%s" % (safe_str(self.NEW_REPO), non_ascii)
-        repo_name_unicode = safe_unicode(repo_name)
+        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
         description = 'description for newly created repo' + non_ascii
-        description_unicode = safe_unicode(description)
-        response = self.app.post(url('repos'),
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
                                                 repo_description=description,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=repo_name))
-        assert response.json == {u'result': True}
+        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
+        assert response.json == {'result': True}
         self.checkSessionFlash(response,
-                               u'Created repository <a href="/%s">%s</a>'
-                               % (urllib.quote(repo_name), repo_name_unicode))
+                               'Created repository <a href="/%s">%s</a>'
+                               % (urllib.parse.quote(repo_name), repo_name))
         # test if the repo was created in the database
         new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name_unicode).one()
+            .filter(Repository.repo_name == repo_name).one()
 
-        assert new_repo.repo_name == repo_name_unicode
-        assert new_repo.description == description_unicode
+        assert new_repo.repo_name == repo_name
+        assert new_repo.description == description
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=repo_name))
+        response = self.app.get(base.url('summary_home', repo_name=repo_name))
         response.mustcontain(repo_name)
         response.mustcontain(self.REPO_TYPE)
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode)))
+            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
-        response = self.app.post(url('delete_repo', repo_name=repo_name),
+        response = self.app.post(base.url('delete_repo', repo_name=repo_name),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name_unicode))
+        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
         response.follow()
 
         # check if repo was deleted from db
         deleted_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name_unicode).scalar()
+            .filter(Repository.repo_name == repo_name).scalar()
 
         assert deleted_repo is None
 
-        assert os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_unicode)) == False
+        assert os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
 
     def test_delete_repo_with_group(self):
         # TODO:
         pass
 
     def test_delete_browser_fakeout(self):
-        response = self.app.post(url('delete_repo', repo_name=self.REPO),
+        response = self.app.post(base.url('delete_repo', repo_name=self.REPO),
                                  params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
     def test_show(self):
         self.log_user()
-        response = self.app.get(url('summary_home', repo_name=self.REPO))
+        response = self.app.get(base.url('summary_home', repo_name=self.REPO))
 
     def test_edit(self):
-        response = self.app.get(url('edit_repo', repo_name=self.REPO))
+        response = self.app.get(base.url('edit_repo', repo_name=self.REPO))
 
     def test_set_private_flag_sets_default_to_none(self):
         self.log_user()
@@ -465,11 +463,11 @@
         assert perm[0].permission.permission_name == 'repository.read'
         assert Repository.get_by_repo_name(self.REPO).private == False
 
-        response = self.app.post(url('update_repo', repo_name=self.REPO),
+        response = self.app.post(base.url('update_repo', repo_name=self.REPO),
                         fixture._get_repo_create_params(repo_private=1,
                                                 repo_name=self.REPO,
                                                 repo_type=self.REPO_TYPE,
-                                                owner=TEST_USER_ADMIN_LOGIN,
+                                                owner=base.TEST_USER_ADMIN_LOGIN,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
                                msg='Repository %s updated successfully' % (self.REPO))
@@ -480,11 +478,11 @@
         assert len(perm), 1
         assert perm[0].permission.permission_name == 'repository.none'
 
-        response = self.app.post(url('update_repo', repo_name=self.REPO),
+        response = self.app.post(base.url('update_repo', repo_name=self.REPO),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=self.REPO,
                                                 repo_type=self.REPO_TYPE,
-                                                owner=TEST_USER_ADMIN_LOGIN,
+                                                owner=base.TEST_USER_ADMIN_LOGIN,
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
                                msg='Repository %s updated successfully' % (self.REPO))
@@ -502,17 +500,17 @@
     def test_set_repo_fork_has_no_self_id(self):
         self.log_user()
         repo = Repository.get_by_repo_name(self.REPO)
-        response = self.app.get(url('edit_repo_advanced', repo_name=self.REPO))
+        response = self.app.get(base.url('edit_repo_advanced', repo_name=self.REPO))
         opt = """<option value="%s">%s</option>""" % (repo.repo_id, self.REPO)
         response.mustcontain(no=[opt])
 
     def test_set_fork_of_other_repo(self):
         self.log_user()
-        other_repo = u'other_%s' % self.REPO_TYPE
+        other_repo = 'other_%s' % self.REPO_TYPE
         fixture.create_repo(other_repo, repo_type=self.REPO_TYPE)
         repo = Repository.get_by_repo_name(self.REPO)
         repo2 = Repository.get_by_repo_name(other_repo)
-        response = self.app.post(url('edit_repo_advanced_fork', repo_name=self.REPO),
+        response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo2.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
         repo = Repository.get_by_repo_name(self.REPO)
         repo2 = Repository.get_by_repo_name(other_repo)
@@ -533,7 +531,7 @@
         self.log_user()
         repo = Repository.get_by_repo_name(self.REPO)
         repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
-        response = self.app.post(url('edit_repo_advanced_fork', repo_name=self.REPO),
+        response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo2.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
         repo = Repository.get_by_repo_name(self.REPO)
         repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
@@ -543,7 +541,7 @@
     def test_set_fork_of_none(self):
         self.log_user()
         ## mark it as None
-        response = self.app.post(url('edit_repo_advanced_fork', repo_name=self.REPO),
+        response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=None, _session_csrf_secret_token=self.session_csrf_secret_token()))
         repo = Repository.get_by_repo_name(self.REPO)
         repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
@@ -555,34 +553,34 @@
     def test_set_fork_of_same_repo(self):
         self.log_user()
         repo = Repository.get_by_repo_name(self.REPO)
-        response = self.app.post(url('edit_repo_advanced_fork', repo_name=self.REPO),
+        response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
                                'An error occurred during this operation')
 
     def test_create_on_top_level_without_permissions(self):
-        usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
         # revoke
         user_model = UserModel()
         # disable fork and create on default user
-        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
-        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
-        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
-        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
+        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.create.repository')
+        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.create.none')
+        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.fork.repository')
+        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.fork.none')
 
         # disable on regular user
-        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
-        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
-        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
-        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
+        user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
+        user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.none')
+        user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
+        user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
         Session().commit()
 
 
         user = User.get(usr['user_id'])
 
-        repo_name = self.NEW_REPO + u'no_perms'
+        repo_name = self.NEW_REPO + 'no_perms'
         description = 'description for newly created repo'
-        response = self.app.post(url('repos'),
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -600,7 +598,7 @@
         repo_name = self.NEW_REPO
         description = 'description for newly created repo'
 
-        response = self.app.post(url('repos'),
+        response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
                                                 repo_name=repo_name,
                                                 repo_type=self.REPO_TYPE,
@@ -618,18 +616,18 @@
 
 
 class TestAdminReposControllerGIT(_BaseTestCase):
-    REPO = GIT_REPO
+    REPO = base.GIT_REPO
     REPO_TYPE = 'git'
-    NEW_REPO = NEW_GIT_REPO
-    OTHER_TYPE_REPO = HG_REPO
+    NEW_REPO = base.NEW_GIT_REPO
+    OTHER_TYPE_REPO = base.HG_REPO
     OTHER_TYPE = 'hg'
 
 
 class TestAdminReposControllerHG(_BaseTestCase):
-    REPO = HG_REPO
+    REPO = base.HG_REPO
     REPO_TYPE = 'hg'
-    NEW_REPO = NEW_HG_REPO
-    OTHER_TYPE_REPO = GIT_REPO
+    NEW_REPO = base.NEW_HG_REPO
+    OTHER_TYPE_REPO = base.GIT_REPO
     OTHER_TYPE = 'git'
 
     def test_permanent_url_protocol_access(self):
@@ -637,7 +635,7 @@
         permanent_name = '_%d' % repo.repo_id
 
         # 400 Bad Request - Unable to detect pull/push action
-        self.app.get(url('summary_home', repo_name=permanent_name),
+        self.app.get(base.url('summary_home', repo_name=permanent_name),
             extra_environ={'HTTP_ACCEPT': 'application/mercurial'},
             status=400,
         )
--- a/kallithea/tests/functional/test_admin_settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_settings.py	Sat May 02 21:20:43 2020 +0200
@@ -1,54 +1,54 @@
 # -*- coding: utf-8 -*-
 
 from kallithea.model.db import Setting, Ui
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestAdminSettingsController(TestController):
+class TestAdminSettingsController(base.TestController):
 
     def test_index_main(self):
         self.log_user()
-        response = self.app.get(url('admin_settings'))
+        response = self.app.get(base.url('admin_settings'))
 
     def test_index_mapping(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_mapping'))
+        response = self.app.get(base.url('admin_settings_mapping'))
 
     def test_index_global(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_global'))
+        response = self.app.get(base.url('admin_settings_global'))
 
     def test_index_visual(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_visual'))
+        response = self.app.get(base.url('admin_settings_visual'))
 
     def test_index_email(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_email'))
+        response = self.app.get(base.url('admin_settings_email'))
 
     def test_index_hooks(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_hooks'))
+        response = self.app.get(base.url('admin_settings_hooks'))
 
     def test_create_custom_hook(self):
         self.log_user()
-        response = self.app.post(url('admin_settings_hooks'),
+        response = self.app.post(base.url('admin_settings_hooks'),
                                 params=dict(new_hook_ui_key='test_hooks_1',
-                                            new_hook_ui_value='cd %s' % TESTS_TMP_PATH,
+                                            new_hook_ui_value='cd %s' % base.TESTS_TMP_PATH,
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
 
         self.checkSessionFlash(response, 'Added new hook')
         response = response.follow()
         response.mustcontain('test_hooks_1')
-        response.mustcontain('cd %s' % TESTS_TMP_PATH)
+        response.mustcontain('cd %s' % base.TESTS_TMP_PATH)
 
     def test_edit_custom_hook(self):
         self.log_user()
-        response = self.app.post(url('admin_settings_hooks'),
+        response = self.app.post(base.url('admin_settings_hooks'),
                                 params=dict(hook_ui_key='test_hooks_1',
                                             hook_ui_value='old_value_of_hook_1',
                                             hook_ui_value_new='new_value_of_hook_1',
@@ -60,7 +60,7 @@
 
     def test_add_existing_custom_hook(self):
         self.log_user()
-        response = self.app.post(url('admin_settings_hooks'),
+        response = self.app.post(base.url('admin_settings_hooks'),
                                 params=dict(new_hook_ui_key='test_hooks_1',
                                             new_hook_ui_value='attempted_new_value',
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
@@ -72,27 +72,27 @@
 
     def test_create_custom_hook_delete(self):
         self.log_user()
-        response = self.app.post(url('admin_settings_hooks'),
+        response = self.app.post(base.url('admin_settings_hooks'),
                                 params=dict(new_hook_ui_key='test_hooks_2',
-                                            new_hook_ui_value='cd %s2' % TESTS_TMP_PATH,
+                                            new_hook_ui_value='cd %s2' % base.TESTS_TMP_PATH,
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
 
         self.checkSessionFlash(response, 'Added new hook')
         response = response.follow()
         response.mustcontain('test_hooks_2')
-        response.mustcontain('cd %s2' % TESTS_TMP_PATH)
+        response.mustcontain('cd %s2' % base.TESTS_TMP_PATH)
 
         hook_id = Ui.get_by_key('hooks', 'test_hooks_2').ui_id
         ## delete
-        self.app.post(url('admin_settings_hooks'),
+        self.app.post(base.url('admin_settings_hooks'),
                         params=dict(hook_id=hook_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
-        response = self.app.get(url('admin_settings_hooks'))
+        response = self.app.get(base.url('admin_settings_hooks'))
         response.mustcontain(no=['test_hooks_2'])
-        response.mustcontain(no=['cd %s2' % TESTS_TMP_PATH])
+        response.mustcontain(no=['cd %s2' % base.TESTS_TMP_PATH])
 
     def test_add_existing_builtin_hook(self):
         self.log_user()
-        response = self.app.post(url('admin_settings_hooks'),
+        response = self.app.post(base.url('admin_settings_hooks'),
                                 params=dict(new_hook_ui_key='changegroup.update',
                                             new_hook_ui_value='attempted_new_value',
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
@@ -104,18 +104,18 @@
 
     def test_index_search(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_search'))
+        response = self.app.get(base.url('admin_settings_search'))
 
     def test_index_system(self):
         self.log_user()
-        response = self.app.get(url('admin_settings_system'))
+        response = self.app.get(base.url('admin_settings_system'))
 
     def test_ga_code_active(self):
         self.log_user()
         old_title = 'Kallithea'
         old_realm = 'Kallithea authentication'
         new_ga_code = 'ga-test-123456789'
-        response = self.app.post(url('admin_settings_global'),
+        response = self.app.post(base.url('admin_settings_global'),
                         params=dict(title=old_title,
                                  realm=old_realm,
                                  ga_code=new_ga_code,
@@ -136,7 +136,7 @@
         old_title = 'Kallithea'
         old_realm = 'Kallithea authentication'
         new_ga_code = ''
-        response = self.app.post(url('admin_settings_global'),
+        response = self.app.post(base.url('admin_settings_global'),
                         params=dict(title=old_title,
                                  realm=old_realm,
                                  ga_code=new_ga_code,
@@ -156,7 +156,7 @@
         old_title = 'Kallithea'
         old_realm = 'Kallithea authentication'
         new_ga_code = ''
-        response = self.app.post(url('admin_settings_global'),
+        response = self.app.post(base.url('admin_settings_global'),
                         params=dict(title=old_title,
                                  realm=old_realm,
                                  ga_code=new_ga_code,
@@ -168,7 +168,7 @@
         self.checkSessionFlash(response, 'Updated application settings')
         assert Setting.get_app_settings()['captcha_private_key'] == '1234567890'
 
-        response = self.app.get(url('register'))
+        response = self.app.get(base.url('register'))
         response.mustcontain('captcha')
 
     def test_captcha_deactivate(self):
@@ -176,7 +176,7 @@
         old_title = 'Kallithea'
         old_realm = 'Kallithea authentication'
         new_ga_code = ''
-        response = self.app.post(url('admin_settings_global'),
+        response = self.app.post(base.url('admin_settings_global'),
                         params=dict(title=old_title,
                                  realm=old_realm,
                                  ga_code=new_ga_code,
@@ -188,7 +188,7 @@
         self.checkSessionFlash(response, 'Updated application settings')
         assert Setting.get_app_settings()['captcha_private_key'] == ''
 
-        response = self.app.get(url('register'))
+        response = self.app.get(base.url('register'))
         response.mustcontain(no=['captcha'])
 
     def test_title_change(self):
@@ -198,7 +198,7 @@
         old_realm = 'Kallithea authentication'
 
         for new_title in ['Changed', 'Żółwik', old_title]:
-            response = self.app.post(url('admin_settings_global'),
+            response = self.app.post(base.url('admin_settings_global'),
                         params=dict(title=new_title,
                                  realm=old_realm,
                                  ga_code='',
@@ -208,7 +208,7 @@
                                 ))
 
             self.checkSessionFlash(response, 'Updated application settings')
-            assert Setting.get_app_settings()['title'] == new_title.decode('utf-8')
+            assert Setting.get_app_settings()['title'] == new_title
 
             response = response.follow()
             response.mustcontain("""<span class="branding">%s</span>""" % new_title)
--- a/kallithea/tests/functional/test_admin_user_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_user_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -1,25 +1,25 @@
 # -*- coding: utf-8 -*-
 from kallithea.model.db import Permission, UserGroup, UserGroupToPerm
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-TEST_USER_GROUP = u'admins_test'
+TEST_USER_GROUP = 'admins_test'
 
 
-class TestAdminUsersGroupsController(TestController):
+class TestAdminUsersGroupsController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('users_groups'))
+        response = self.app.get(base.url('users_groups'))
         # Test response...
 
     def test_create(self):
         self.log_user()
         users_group_name = TEST_USER_GROUP
-        response = self.app.post(url('users_groups'),
+        response = self.app.post(base.url('users_groups'),
                                  {'users_group_name': users_group_name,
-                                  'user_group_description': u'DESC',
+                                  'user_group_description': 'DESC',
                                   'active': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
@@ -30,21 +30,21 @@
                                '/edit">%s</a>' % TEST_USER_GROUP)
 
     def test_new(self):
-        response = self.app.get(url('new_users_group'))
+        response = self.app.get(base.url('new_users_group'))
 
     def test_update(self):
-        response = self.app.post(url('update_users_group', id=1), status=403)
+        response = self.app.post(base.url('update_users_group', id=1), status=403)
 
     def test_update_browser_fakeout(self):
-        response = self.app.post(url('update_users_group', id=1),
+        response = self.app.post(base.url('update_users_group', id=1),
                                  params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
     def test_delete(self):
         self.log_user()
         users_group_name = TEST_USER_GROUP + 'another'
-        response = self.app.post(url('users_groups'),
+        response = self.app.post(base.url('users_groups'),
                                  {'users_group_name': users_group_name,
-                                  'user_group_description': u'DESC',
+                                  'user_group_description': 'DESC',
                                   'active': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
@@ -55,7 +55,7 @@
         gr = Session().query(UserGroup) \
             .filter(UserGroup.users_group_name == users_group_name).one()
 
-        response = self.app.post(url('delete_users_group', id=gr.users_group_id),
+        response = self.app.post(base.url('delete_users_group', id=gr.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         gr = Session().query(UserGroup) \
@@ -66,9 +66,9 @@
     def test_default_perms_enable_repository_read_on_group(self):
         self.log_user()
         users_group_name = TEST_USER_GROUP + 'another2'
-        response = self.app.post(url('users_groups'),
+        response = self.app.post(base.url('users_groups'),
                                  {'users_group_name': users_group_name,
-                                  'user_group_description': u'DESC',
+                                  'user_group_description': 'DESC',
                                   'active': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
@@ -77,7 +77,7 @@
         self.checkSessionFlash(response,
                                'Created user group ')
         ## ENABLE REPO CREATE ON A GROUP
-        response = self.app.post(url('edit_user_group_default_perms_update',
+        response = self.app.post(base.url('edit_user_group_default_perms_update',
                                      id=ug.users_group_id),
                                  {'create_repo_perm': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -97,7 +97,7 @@
 
         ## DISABLE REPO CREATE ON A GROUP
         response = self.app.post(
-            url('edit_user_group_default_perms_update', id=ug.users_group_id),
+            base.url('edit_user_group_default_perms_update', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.follow()
@@ -118,7 +118,7 @@
         # DELETE !
         ug = UserGroup.get_by_group_name(users_group_name)
         ugid = ug.users_group_id
-        response = self.app.post(url('delete_users_group', id=ug.users_group_id),
+        response = self.app.post(base.url('delete_users_group', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         response = response.follow()
         gr = Session().query(UserGroup) \
@@ -135,9 +135,9 @@
     def test_default_perms_enable_repository_fork_on_group(self):
         self.log_user()
         users_group_name = TEST_USER_GROUP + 'another2'
-        response = self.app.post(url('users_groups'),
+        response = self.app.post(base.url('users_groups'),
                                  {'users_group_name': users_group_name,
-                                  'user_group_description': u'DESC',
+                                  'user_group_description': 'DESC',
                                   'active': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
@@ -146,7 +146,7 @@
         self.checkSessionFlash(response,
                                'Created user group ')
         ## ENABLE REPO CREATE ON A GROUP
-        response = self.app.post(url('edit_user_group_default_perms_update',
+        response = self.app.post(base.url('edit_user_group_default_perms_update',
                                      id=ug.users_group_id),
                                  {'fork_repo_perm': True, '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
@@ -165,7 +165,7 @@
                     [ug.users_group_id, p3.permission_id]])
 
         ## DISABLE REPO CREATE ON A GROUP
-        response = self.app.post(url('edit_user_group_default_perms_update', id=ug.users_group_id),
+        response = self.app.post(base.url('edit_user_group_default_perms_update', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.follow()
@@ -185,7 +185,7 @@
         # DELETE !
         ug = UserGroup.get_by_group_name(users_group_name)
         ugid = ug.users_group_id
-        response = self.app.post(url('delete_users_group', id=ug.users_group_id),
+        response = self.app.post(base.url('delete_users_group', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         response = response.follow()
         gr = Session().query(UserGroup) \
@@ -201,5 +201,5 @@
         assert perms == []
 
     def test_delete_browser_fakeout(self):
-        response = self.app.post(url('delete_users_group', id=1),
+        response = self.app.post(base.url('delete_users_group', id=1),
                                  params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
--- a/kallithea/tests/functional/test_admin_users.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_admin_users.py	Sat May 02 21:20:43 2020 +0200
@@ -17,6 +17,7 @@
 from tg.util.webtest import test_context
 from webob.exc import HTTPNotFound
 
+import kallithea
 from kallithea.controllers.admin.users import UsersController
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import check_password
@@ -24,7 +25,7 @@
 from kallithea.model.db import Permission, RepoGroup, User, UserApiKeys, UserSshKeys
 from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -34,7 +35,7 @@
 @pytest.fixture
 def user_and_repo_group_fail():
     username = 'repogrouperr'
-    groupname = u'repogroup_fail'
+    groupname = 'repogroup_fail'
     user = fixture.create_user(name=username)
     repo_group = fixture.create_repo_group(name=groupname, cur_user=username)
     yield user, repo_group
@@ -43,7 +44,7 @@
         fixture.destroy_repo_group(repo_group)
 
 
-class TestAdminUsersController(TestController):
+class TestAdminUsersController(base.TestController):
     test_user_1 = 'testme'
 
     @classmethod
@@ -54,7 +55,7 @@
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('users'))
+        response = self.app.get(base.url('users'))
         # TODO: Test response...
 
     def test_create(self):
@@ -62,11 +63,11 @@
         username = 'newtestuser'
         password = 'test12'
         password_confirmation = password
-        name = u'name'
-        lastname = u'lastname'
+        name = 'name'
+        lastname = 'lastname'
         email = 'mail@example.com'
 
-        response = self.app.post(url('new_user'),
+        response = self.app.post(base.url('new_user'),
             {'username': username,
              'password': password,
              'password_confirmation': password_confirmation,
@@ -98,11 +99,11 @@
         self.log_user()
         username = 'new_user'
         password = ''
-        name = u'name'
-        lastname = u'lastname'
+        name = 'name'
+        lastname = 'lastname'
         email = 'errmail.example.com'
 
-        response = self.app.post(url('new_user'),
+        response = self.app.post(base.url('new_user'),
             {'username': username,
              'password': password,
              'name': name,
@@ -126,9 +127,9 @@
 
     def test_new(self):
         self.log_user()
-        response = self.app.get(url('new_user'))
+        response = self.app.get(base.url('new_user'))
 
-    @parametrize('name,attrs',
+    @base.parametrize('name,attrs',
         [('firstname', {'firstname': 'new_username'}),
          ('lastname', {'lastname': 'new_username'}),
          ('admin', {'admin': True}),
@@ -167,7 +168,7 @@
             # not filled so we use creation data
 
         params.update({'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        response = self.app.post(url('update_user', id=usr.user_id), params)
+        response = self.app.post(base.url('update_user', id=usr.user_id), params)
         self.checkSessionFlash(response, 'User updated successfully')
         params.pop('_session_csrf_secret_token')
 
@@ -186,7 +187,7 @@
 
         new_user = Session().query(User) \
             .filter(User.username == username).one()
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         self.checkSessionFlash(response, 'Successfully deleted user')
@@ -194,25 +195,25 @@
     def test_delete_repo_err(self):
         self.log_user()
         username = 'repoerr'
-        reponame = u'repoerr_fail'
+        reponame = 'repoerr_fail'
 
         fixture.create_user(name=username)
         fixture.create_repo(name=reponame, cur_user=username)
 
         new_user = Session().query(User) \
             .filter(User.username == username).one()
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        self.checkSessionFlash(response, 'User "%s" still '
+        self.checkSessionFlash(response, 'User &quot;%s&quot; still '
                                'owns 1 repositories and cannot be removed. '
                                'Switch owners or remove those repositories: '
                                '%s' % (username, reponame))
 
-        response = self.app.post(url('delete_repo', repo_name=reponame),
+        response = self.app.post(base.url('delete_repo', repo_name=reponame),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Deleted repository %s' % reponame)
 
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Successfully deleted user')
 
@@ -223,56 +224,56 @@
 
         self.log_user()
 
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        self.checkSessionFlash(response, 'User "%s" still '
+        self.checkSessionFlash(response, 'User &quot;%s&quot; still '
                                'owns 1 repository groups and cannot be removed. '
                                'Switch owners or remove those repository groups: '
                                '%s' % (username, groupname))
 
         # Relevant _if_ the user deletion succeeded to make sure we can render groups without owner
         # rg = RepoGroup.get_by_group_name(group_name=groupname)
-        # response = self.app.get(url('repos_groups', id=rg.group_id))
+        # response = self.app.get(base.url('repos_groups', id=rg.group_id))
 
-        response = self.app.post(url('delete_repo_group', group_name=groupname),
+        response = self.app.post(base.url('delete_repo_group', group_name=groupname),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Removed repository group %s' % groupname)
 
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Successfully deleted user')
 
     def test_delete_user_group_err(self):
         self.log_user()
         username = 'usergrouperr'
-        groupname = u'usergroup_fail'
+        groupname = 'usergroup_fail'
 
         fixture.create_user(name=username)
         ug = fixture.create_user_group(name=groupname, cur_user=username)
 
         new_user = Session().query(User) \
             .filter(User.username == username).one()
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        self.checkSessionFlash(response, 'User "%s" still '
+        self.checkSessionFlash(response, 'User &quot;%s&quot; still '
                                'owns 1 user groups and cannot be removed. '
                                'Switch owners or remove those user groups: '
                                '%s' % (username, groupname))
 
         # TODO: why do this fail?
-        #response = self.app.delete(url('delete_users_group', id=groupname))
+        #response = self.app.delete(base.url('delete_users_group', id=groupname))
         #self.checkSessionFlash(response, 'Removed user group %s' % groupname)
 
         fixture.destroy_user_group(ug.users_group_id)
 
-        response = self.app.post(url('delete_user', id=new_user.user_id),
+        response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Successfully deleted user')
 
     def test_edit(self):
         self.log_user()
-        user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
-        response = self.app.get(url('edit_user', id=user.user_id))
+        user = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        response = self.app.get(base.url('edit_user', id=user.user_id))
 
     def test_add_perm_create_repo(self):
         self.log_user()
@@ -280,8 +281,8 @@
         perm_create = Permission.get_by_key('hg.create.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
-                                            email='dummy', firstname=u'a',
-                                            lastname=u'b')
+                                            email='dummy', firstname='a',
+                                            lastname='b')
         Session().commit()
         uid = user.user_id
 
@@ -290,7 +291,7 @@
             assert UserModel().has_perm(user, perm_none) == False
             assert UserModel().has_perm(user, perm_create) == False
 
-            response = self.app.post(url('edit_user_perms_update', id=uid),
+            response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(create_repo_perm=True,
                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
 
@@ -310,8 +311,8 @@
         perm_create = Permission.get_by_key('hg.create.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
-                                            email='dummy', firstname=u'a',
-                                            lastname=u'b')
+                                            email='dummy', firstname='a',
+                                            lastname='b')
         Session().commit()
         uid = user.user_id
 
@@ -320,7 +321,7 @@
             assert UserModel().has_perm(user, perm_none) == False
             assert UserModel().has_perm(user, perm_create) == False
 
-            response = self.app.post(url('edit_user_perms_update', id=uid),
+            response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
             perm_none = Permission.get_by_key('hg.create.none')
@@ -339,8 +340,8 @@
         perm_fork = Permission.get_by_key('hg.fork.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
-                                            email='dummy', firstname=u'a',
-                                            lastname=u'b')
+                                            email='dummy', firstname='a',
+                                            lastname='b')
         Session().commit()
         uid = user.user_id
 
@@ -349,7 +350,7 @@
             assert UserModel().has_perm(user, perm_none) == False
             assert UserModel().has_perm(user, perm_fork) == False
 
-            response = self.app.post(url('edit_user_perms_update', id=uid),
+            response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(create_repo_perm=True,
                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
 
@@ -369,8 +370,8 @@
         perm_fork = Permission.get_by_key('hg.fork.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
-                                            email='dummy', firstname=u'a',
-                                            lastname=u'b')
+                                            email='dummy', firstname='a',
+                                            lastname='b')
         Session().commit()
         uid = user.user_id
 
@@ -379,7 +380,7 @@
             assert UserModel().has_perm(user, perm_none) == False
             assert UserModel().has_perm(user, perm_fork) == False
 
-            response = self.app.post(url('edit_user_perms_update', id=uid),
+            response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
             perm_none = Permission.get_by_key('hg.create.none')
@@ -394,11 +395,11 @@
 
     def test_ips(self):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
-        response = self.app.get(url('edit_user_ips', id=user.user_id))
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        response = self.app.get(base.url('edit_user_ips', id=user.user_id))
         response.mustcontain('All IP addresses are allowed')
 
-    @parametrize('test_name,ip,ip_range,failure', [
+    @base.parametrize('test_name,ip,ip_range,failure', [
         ('127/24', '127.0.0.1/24', '127.0.0.0 - 127.0.0.255', False),
         ('10/32', '10.0.0.10/32', '10.0.0.10 - 10.0.0.10', False),
         ('0/16', '0.0.0.0/16', '0.0.0.0 - 0.0.255.255', False),
@@ -408,26 +409,26 @@
     ])
     def test_add_ip(self, test_name, ip, ip_range, failure, auto_clear_ip_permissions):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
-        response = self.app.post(url('edit_user_ips_update', id=user_id),
+        response = self.app.post(base.url('edit_user_ips_update', id=user_id),
                                  params=dict(new_ip=ip, _session_csrf_secret_token=self.session_csrf_secret_token()))
 
         if failure:
             self.checkSessionFlash(response, 'Please enter a valid IPv4 or IPv6 address')
-            response = self.app.get(url('edit_user_ips', id=user_id))
+            response = self.app.get(base.url('edit_user_ips', id=user_id))
             response.mustcontain(no=[ip])
             response.mustcontain(no=[ip_range])
 
         else:
-            response = self.app.get(url('edit_user_ips', id=user_id))
+            response = self.app.get(base.url('edit_user_ips', id=user_id))
             response.mustcontain(ip)
             response.mustcontain(ip_range)
 
     def test_delete_ip(self, auto_clear_ip_permissions):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
         ip = '127.0.0.1/32'
         ip_range = '127.0.0.1 - 127.0.0.1'
@@ -436,14 +437,14 @@
             Session().commit()
         new_ip_id = new_ip.ip_id
 
-        response = self.app.get(url('edit_user_ips', id=user_id))
+        response = self.app.get(base.url('edit_user_ips', id=user_id))
         response.mustcontain(ip)
         response.mustcontain(ip_range)
 
-        self.app.post(url('edit_user_ips_delete', id=user_id),
+        self.app.post(base.url('edit_user_ips_delete', id=user_id),
                       params=dict(del_ip_id=new_ip_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
 
-        response = self.app.get(url('edit_user_ips', id=user_id))
+        response = self.app.get(base.url('edit_user_ips', id=user_id))
         response.mustcontain('All IP addresses are allowed')
         response.mustcontain(no=[ip])
         response.mustcontain(no=[ip_range])
@@ -451,22 +452,22 @@
     def test_api_keys(self):
         self.log_user()
 
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
-        response = self.app.get(url('edit_user_api_keys', id=user.user_id))
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        response = self.app.get(base.url('edit_user_api_keys', id=user.user_id))
         response.mustcontain(user.api_key)
         response.mustcontain('Expires: Never')
 
-    @parametrize('desc,lifetime', [
+    @base.parametrize('desc,lifetime', [
         ('forever', -1),
         ('5mins', 60*5),
         ('30days', 60*60*24*30),
     ])
     def test_add_api_keys(self, desc, lifetime):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
-        response = self.app.post(url('edit_user_api_keys_update', id=user_id),
+        response = self.app.post(base.url('edit_user_api_keys_update', id=user_id),
                  {'description': desc, 'lifetime': lifetime, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         try:
@@ -481,10 +482,10 @@
 
     def test_remove_api_key(self):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
-        response = self.app.post(url('edit_user_api_keys_update', id=user_id),
+        response = self.app.post(base.url('edit_user_api_keys_update', id=user_id),
                 {'description': 'desc', 'lifetime': -1, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         response = response.follow()
@@ -493,7 +494,7 @@
         keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
         assert 1 == len(keys)
 
-        response = self.app.post(url('edit_user_api_keys_delete', id=user_id),
+        response = self.app.post(base.url('edit_user_api_keys_delete', id=user_id),
                  {'del_api_key': keys[0].api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully deleted')
         keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
@@ -501,29 +502,29 @@
 
     def test_reset_main_api_key(self):
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
         api_key = user.api_key
-        response = self.app.get(url('edit_user_api_keys', id=user_id))
+        response = self.app.get(base.url('edit_user_api_keys', id=user_id))
         response.mustcontain(api_key)
         response.mustcontain('Expires: Never')
 
-        response = self.app.post(url('edit_user_api_keys_delete', id=user_id),
+        response = self.app.post(base.url('edit_user_api_keys_delete', id=user_id),
                  {'del_api_key_builtin': api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully reset')
         response = response.follow()
         response.mustcontain(no=[api_key])
 
     def test_add_ssh_key(self):
-        description = u'something'
-        public_key = u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
-        fingerprint = u'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
+        description = 'something'
+        public_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
+        fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
-        response = self.app.post(url('edit_user_ssh_keys', id=user_id),
+        response = self.app.post(base.url('edit_user_ssh_keys', id=user_id),
                                  {'description': description,
                                   'public_key': public_key,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -538,24 +539,24 @@
         Session().commit()
 
     def test_remove_ssh_key(self):
-        description = u''
-        public_key = u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
-        fingerprint = u'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
+        description = ''
+        public_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
+        fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
         self.log_user()
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
-        response = self.app.post(url('edit_user_ssh_keys', id=user_id),
+        response = self.app.post(base.url('edit_user_ssh_keys', id=user_id),
                                  {'description': description,
                                   'public_key': public_key,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key %s successfully added' % fingerprint)
         response.follow()
         ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
-        assert ssh_key.description == u'me@localhost'
+        assert ssh_key.description == 'me@localhost'
 
-        response = self.app.post(url('edit_user_ssh_keys_delete', id=user_id),
+        response = self.app.post(base.url('edit_user_ssh_keys_delete', id=user_id),
                                  {'del_public_key_fingerprint': ssh_key.fingerprint,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key successfully deleted')
@@ -563,7 +564,7 @@
         assert 0 == len(keys)
 
 
-class TestAdminUsersController_unittest(TestController):
+class TestAdminUsersController_unittest(base.TestController):
     """ Unit tests for the users controller """
 
     def test_get_user_or_raise_if_default(self, monkeypatch, test_context_fixture):
@@ -574,14 +575,14 @@
 
         u = UsersController()
         # a regular user should work correctly
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         assert u._get_user_or_raise_if_default(user.user_id) == user
         # the default user should raise
         with pytest.raises(HTTPNotFound):
-            u._get_user_or_raise_if_default(User.get_default_user().user_id)
+            u._get_user_or_raise_if_default(kallithea.DEFAULT_USER_ID)
 
 
-class TestAdminUsersControllerForDefaultUser(TestController):
+class TestAdminUsersControllerForDefaultUser(base.TestController):
     """
     Edit actions on the default user are not allowed.
     Validate that they throw a 404 exception.
@@ -589,59 +590,59 @@
     def test_edit_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user', id=user.user_id), status=404)
 
     def test_edit_advanced_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user_advanced', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user_advanced', id=user.user_id), status=404)
 
     # API keys
     def test_edit_api_keys_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user_api_keys', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user_api_keys', id=user.user_id), status=404)
 
     def test_add_api_keys_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.post(url('edit_user_api_keys_update', id=user.user_id),
+        response = self.app.post(base.url('edit_user_api_keys_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     def test_delete_api_keys_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.post(url('edit_user_api_keys_delete', id=user.user_id),
+        response = self.app.post(base.url('edit_user_api_keys_delete', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     # Permissions
     def test_edit_perms_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user_perms', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user_perms', id=user.user_id), status=404)
 
     def test_update_perms_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.post(url('edit_user_perms_update', id=user.user_id),
+        response = self.app.post(base.url('edit_user_perms_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     # Emails
     def test_edit_emails_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user_emails', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user_emails', id=user.user_id), status=404)
 
     def test_add_emails_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.post(url('edit_user_emails_update', id=user.user_id),
+        response = self.app.post(base.url('edit_user_emails_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     def test_delete_emails_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.post(url('edit_user_emails_delete', id=user.user_id),
+        response = self.app.post(base.url('edit_user_emails_delete', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     # IP addresses
@@ -650,4 +651,4 @@
     def test_edit_ip_default_user(self):
         self.log_user()
         user = User.get_default_user()
-        response = self.app.get(url('edit_user_ips', id=user.user_id), status=404)
+        response = self.app.get(base.url('edit_user_ips', id=user.user_id), status=404)
--- a/kallithea/tests/functional/test_changelog.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_changelog.py	Sat May 02 21:20:43 2020 +0200
@@ -1,12 +1,12 @@
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestChangelogController(TestController):
+class TestChangelogController(base.TestController):
 
     def test_index_hg(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO))
+        response = self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO))
 
         response.mustcontain('''id="chg_20" class="mergerow"''')
         response.mustcontain(
@@ -17,7 +17,7 @@
         )
         # rev 640: code garden
         response.mustcontain(
-            """<a class="changeset_hash" href="/%s/changeset/0a4e54a4460401d6dbbd6a3604b17cd2b3606b82">r640:0a4e54a44604</a>""" % HG_REPO
+            """<a class="changeset_hash" href="/%s/changeset/0a4e54a4460401d6dbbd6a3604b17cd2b3606b82">r640:0a4e54a44604</a>""" % base.HG_REPO
         )
         response.mustcontain("""code garden""")
 
@@ -26,18 +26,18 @@
     def test_index_pagination_hg(self):
         self.log_user()
         # pagination
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 1})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 2})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 3})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 4})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 5})
-        response = self.app.get(url(controller='changelog', action='index',
-                                    repo_name=HG_REPO), {'page': 6, 'size': 20})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 1})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 2})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 3})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 4})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 5})
+        response = self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.HG_REPO), {'page': 6, 'size': 20})
 
         # Test response after pagination...
         response.mustcontain(
@@ -53,8 +53,8 @@
 
     def test_index_git(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO))
+        response = self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO))
 
         response.mustcontain('''id="chg_20" class=""''') # why no mergerow for git?
         response.mustcontain(
@@ -82,18 +82,18 @@
     def test_index_pagination_git(self):
         self.log_user()
         # pagination
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 1})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 2})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 3})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 4})
-        self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 5})
-        response = self.app.get(url(controller='changelog', action='index',
-                                    repo_name=GIT_REPO), {'page': 6, 'size': 20})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 1})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 2})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 3})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 4})
+        self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 5})
+        response = self.app.get(base.url(controller='changelog', action='index',
+                                    repo_name=base.GIT_REPO), {'page': 6, 'size': 20})
 
         # Test response after pagination...
         response.mustcontain(
@@ -109,9 +109,9 @@
 
     def test_index_hg_with_filenode(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/vcs/exceptions.py',
-                                    repo_name=HG_REPO))
+                                    repo_name=base.HG_REPO))
         # history commits messages
         response.mustcontain('Added exceptions module, this time for real')
         response.mustcontain('Added not implemented hg backend test case')
@@ -120,9 +120,9 @@
 
     def test_index_git_with_filenode(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/vcs/exceptions.py',
-                                    repo_name=GIT_REPO))
+                                    repo_name=base.GIT_REPO))
         # history commits messages
         response.mustcontain('Added exceptions module, this time for real')
         response.mustcontain('Added not implemented hg backend test case')
@@ -130,28 +130,28 @@
 
     def test_index_hg_with_filenode_that_is_dirnode(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/tests',
-                                    repo_name=HG_REPO))
+                                    repo_name=base.HG_REPO))
         assert response.status == '302 Found'
 
     def test_index_git_with_filenode_that_is_dirnode(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/tests',
-                                    repo_name=GIT_REPO))
+                                    repo_name=base.GIT_REPO))
         assert response.status == '302 Found'
 
     def test_index_hg_with_filenode_not_existing(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/wrong_path',
-                                    repo_name=HG_REPO))
+                                    repo_name=base.HG_REPO))
         assert response.status == '302 Found'
 
     def test_index_git_with_filenode_not_existing(self):
         self.log_user()
-        response = self.app.get(url(controller='changelog', action='index',
+        response = self.app.get(base.url(controller='changelog', action='index',
                                     revision='tip', f_path='/wrong_path',
-                                    repo_name=GIT_REPO))
+                                    repo_name=base.GIT_REPO))
         assert response.status == '302 Found'
--- a/kallithea/tests/functional/test_changeset.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_changeset.py	Sat May 02 21:20:43 2020 +0200
@@ -1,24 +1,24 @@
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestChangesetController(TestController):
+class TestChangesetController(base.TestController):
 
     def test_index(self):
-        response = self.app.get(url(controller='changeset', action='index',
-                                    repo_name=HG_REPO, revision='tip'))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                    repo_name=base.HG_REPO, revision='tip'))
         # Test response...
 
     def test_changeset_range(self):
-        #print self.app.get(url(controller='changelog', action='index', repo_name=HG_REPO))
+        #print self.app.get(base.url(controller='changelog', action='index', repo_name=base.HG_REPO))
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                    repo_name=HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52...96507bd11ecc815ebc6270fdf6db110928c09c1e'))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                    repo_name=base.HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52...96507bd11ecc815ebc6270fdf6db110928c09c1e'))
 
-        response = self.app.get(url(controller='changeset', action='changeset_raw',
-                                    repo_name=HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
+        response = self.app.get(base.url(controller='changeset', action='changeset_raw',
+                                    repo_name=base.HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
 
-        response = self.app.get(url(controller='changeset', action='changeset_patch',
-                                    repo_name=HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
+        response = self.app.get(base.url(controller='changeset', action='changeset_patch',
+                                    repo_name=base.HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
 
-        response = self.app.get(url(controller='changeset', action='changeset_download',
-                                    repo_name=HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
+        response = self.app.get(base.url(controller='changeset', action='changeset_download',
+                                    repo_name=base.HG_REPO, revision='a53d9201d4bc278910d416d94941b7ea007ecd52'))
--- a/kallithea/tests/functional/test_changeset_pullrequests_comments.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_changeset_pullrequests_comments.py	Sat May 02 21:20:43 2020 +0200
@@ -3,10 +3,10 @@
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.db import ChangesetComment, PullRequest
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestChangeSetCommentsController(TestController):
+class TestChangeSetCommentsController(base.TestController):
 
     def setup_method(self, method):
         for x in ChangesetComment.query().all():
@@ -16,17 +16,17 @@
     def test_create(self):
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
-        text = u'general comment on changeset'
+        text = 'general comment on changeset'
 
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='changeset', action='comment',
-                                     repo_name=HG_REPO, revision=rev),
+        response = self.app.post(base.url(controller='changeset', action='comment',
+                                     repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                repo_name=HG_REPO, revision=rev))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                repo_name=base.HG_REPO, revision=rev))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 1 comment (0 inline, 1 general)'''
@@ -39,19 +39,19 @@
     def test_create_inline(self):
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
-        text = u'inline comment on changeset'
+        text = 'inline comment on changeset'
         f_path = 'vcs/web/simplevcs/views/repository.py'
         line = 'n1'
 
         params = {'text': text, 'f_path': f_path, 'line': line, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='changeset', action='comment',
-                                     repo_name=HG_REPO, revision=rev),
+        response = self.app.post(base.url(controller='changeset', action='comment',
+                                     repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                repo_name=HG_REPO, revision=rev))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                repo_name=base.HG_REPO, revision=rev))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 1 comment (1 inline, 0 general)'''
@@ -70,22 +70,22 @@
         self.log_user()
 
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
-        text = u'@%s check CommentOnRevision' % TEST_USER_REGULAR_LOGIN
+        text = '@%s check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN
 
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='changeset', action='comment',
-                                     repo_name=HG_REPO, revision=rev),
+        response = self.app.post(base.url(controller='changeset', action='comment',
+                                     repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                repo_name=HG_REPO, revision=rev))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                repo_name=base.HG_REPO, revision=rev))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 1 comment (0 inline, 1 general)'''
         )
-        response.mustcontain('<b>@%s</b> check CommentOnRevision' % TEST_USER_REGULAR_LOGIN)
+        response.mustcontain('<b>@%s</b> check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN)
 
         # test DB
         assert ChangesetComment.query().count() == 1
@@ -93,18 +93,18 @@
     def test_create_status_change(self):
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
-        text = u'general comment on changeset'
+        text = 'general comment on changeset'
 
         params = {'text': text, 'changeset_status': 'rejected',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='changeset', action='comment',
-                                     repo_name=HG_REPO, revision=rev),
+        response = self.app.post(base.url(controller='changeset', action='comment',
+                                     repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                repo_name=HG_REPO, revision=rev))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                repo_name=base.HG_REPO, revision=rev))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 1 comment (0 inline, 1 general)'''
@@ -115,33 +115,33 @@
         assert ChangesetComment.query().count() == 1
 
         # check status
-        status = ChangesetStatusModel().get_status(repo=HG_REPO, revision=rev)
+        status = ChangesetStatusModel().get_status(repo=base.HG_REPO, revision=rev)
         assert status == 'rejected'
 
     def test_delete(self):
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
-        text = u'general comment on changeset to be deleted'
+        text = 'general comment on changeset to be deleted'
 
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='changeset', action='comment',
-                                     repo_name=HG_REPO, revision=rev),
+        response = self.app.post(base.url(controller='changeset', action='comment',
+                                     repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
         comments = ChangesetComment.query().all()
         assert len(comments) == 1
         comment_id = comments[0].comment_id
 
-        self.app.post(url("changeset_comment_delete",
-                                    repo_name=HG_REPO,
+        self.app.post(base.url("changeset_comment_delete",
+                                    repo_name=base.HG_REPO,
                                     comment_id=comment_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         comments = ChangesetComment.query().all()
         assert len(comments) == 0
 
-        response = self.app.get(url(controller='changeset', action='index',
-                                repo_name=HG_REPO, revision=rev))
+        response = self.app.get(base.url(controller='changeset', action='index',
+                                repo_name=base.HG_REPO, revision=rev))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 0 comments (0 inline, 0 general)'''
@@ -149,7 +149,7 @@
         response.mustcontain(no=text)
 
 
-class TestPullrequestsCommentsController(TestController):
+class TestPullrequestsCommentsController(base.TestController):
 
     def setup_method(self, method):
         for x in ChangesetComment.query().all():
@@ -157,34 +157,34 @@
         Session().commit()
 
     def _create_pr(self):
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
-                                 {'org_repo': HG_REPO,
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
+                                 {'org_repo': base.HG_REPO,
                                   'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                  },
                                  status=302)
-        pr_id = int(re.search('/pull-request/(\d+)/', response.location).group(1))
+        pr_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
         return pr_id
 
     def test_create(self):
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'general comment on pullrequest'
+        text = 'general comment on pullrequest'
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         # PRs currently always have an initial 'Under Review' status change
         # that counts as a general comment, hence '2' in the test below. That
         # could be counted as a misfeature, to be reworked later.
@@ -201,18 +201,18 @@
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'inline comment on changeset'
+        text = 'inline comment on changeset'
         f_path = 'vcs/web/simplevcs/views/repository.py'
         line = 'n1'
         params = {'text': text, 'f_path': f_path, 'line': line, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 2 comments (1 inline, 1 general)'''
@@ -231,21 +231,21 @@
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'@%s check CommentOnRevision' % TEST_USER_REGULAR_LOGIN
+        text = '@%s check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 2 comments (0 inline, 2 general)'''
         )
-        response.mustcontain('<b>@%s</b> check CommentOnRevision' % TEST_USER_REGULAR_LOGIN)
+        response.mustcontain('<b>@%s</b> check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN)
 
         # test DB
         assert ChangesetComment.query().count() == 2
@@ -254,17 +254,17 @@
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'general comment on pullrequest'
+        text = 'general comment on pullrequest'
         params = {'text': text, 'changeset_status': 'rejected',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         # PRs currently always have an initial 'Under Review' status change
         # that counts as a general comment, hence '2' in the test below. That
         # could be counted as a misfeature, to be reworked later.
@@ -278,33 +278,33 @@
         assert ChangesetComment.query().count() == 2
 
         # check status
-        status = ChangesetStatusModel().get_status(repo=HG_REPO, pull_request=pr_id)
+        status = ChangesetStatusModel().get_status(repo=base.HG_REPO, pull_request=pr_id)
         assert status == 'rejected'
 
     def test_delete(self):
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'general comment on changeset to be deleted'
+        text = 'general comment on changeset to be deleted'
         params = {'text': text, '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
         comments = ChangesetComment.query().all()
         assert len(comments) == 2
         comment_id = comments[-1].comment_id
 
-        self.app.post(url("pullrequest_comment_delete",
-                                    repo_name=HG_REPO,
+        self.app.post(base.url("pullrequest_comment_delete",
+                                    repo_name=base.HG_REPO,
                                     comment_id=comment_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         comments = ChangesetComment.query().all()
         assert len(comments) == 1
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         response.mustcontain(
             '''<div class="comments-number">'''
             ''' 1 comment (0 inline, 1 general)'''
@@ -315,17 +315,17 @@
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'general comment on pullrequest'
+        text = 'general comment on pullrequest'
         params = {'text': text, 'save_close': 'close',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''))
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
         response.mustcontain(
             '''title (Closed)'''
         )
@@ -338,17 +338,17 @@
         self.log_user()
         pr_id = self._create_pr()
 
-        text = u'general comment on pullrequest'
+        text = 'general comment on pullrequest'
         params = {'text': text, 'save_delete': 'delete',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         # Test response...
         assert response.status == '200 OK'
 
-        response = self.app.get(url(controller='pullrequests', action='show',
-                                repo_name=HG_REPO, pull_request_id=pr_id, extra=''), status=404)
+        response = self.app.get(base.url(controller='pullrequests', action='show',
+                                repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''), status=404)
 
         # test DB
         assert PullRequest.get(pr_id) is None
@@ -358,19 +358,19 @@
         pr_id = self._create_pr()
 
         # first close
-        text = u'general comment on pullrequest'
+        text = 'general comment on pullrequest'
         params = {'text': text, 'save_close': 'close',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
         assert response.status == '200 OK'
 
         # attempt delete, should fail
         params = {'text': text, 'save_delete': 'delete',
                 '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        response = self.app.post(url(controller='pullrequests', action='comment',
-                                     repo_name=HG_REPO, pull_request_id=pr_id),
+        response = self.app.post(base.url(controller='pullrequests', action='comment',
+                                     repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'}, status=403)
 
         # verify that PR still exists, in closed state
--- a/kallithea/tests/functional/test_compare.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_compare.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -12,7 +12,7 @@
     return '''<div class="message-firstline"><a class="message-link" href="/%s/changeset/%s">%s</a></div>''' % (repo_name, sha, msg)
 
 
-class TestCompareController(TestController):
+class TestCompareController(base.TestController):
 
     def setup_method(self, method):
         self.r1_id = None
@@ -28,9 +28,9 @@
 
     def test_compare_forks_on_branch_extra_commits_hg(self):
         self.log_user()
-        repo1 = fixture.create_repo(u'one', repo_type='hg',
+        repo1 = fixture.create_repo('one', repo_type='hg',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         self.r1_id = repo1.repo_id
         # commit something !
         cs0 = fixture.commit_change(repo1.repo_name, filename='file1',
@@ -38,7 +38,7 @@
                 parent=None, newfile=True)
 
         # fork this repo
-        repo2 = fixture.create_fork(u'one', u'one-fork')
+        repo2 = fixture.create_fork('one', 'one-fork')
         self.r2_id = repo2.repo_id
 
         # add two extra commit into fork
@@ -53,7 +53,7 @@
         rev1 = 'default'
         rev2 = 'default'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo1.repo_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -79,9 +79,9 @@
 
     def test_compare_forks_on_branch_extra_commits_git(self):
         self.log_user()
-        repo1 = fixture.create_repo(u'one-git', repo_type='git',
+        repo1 = fixture.create_repo('one-git', repo_type='git',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         self.r1_id = repo1.repo_id
         # commit something !
         cs0 = fixture.commit_change(repo1.repo_name, filename='file1',
@@ -89,7 +89,7 @@
                 parent=None, newfile=True)
 
         # fork this repo
-        repo2 = fixture.create_fork(u'one-git', u'one-git-fork')
+        repo2 = fixture.create_fork('one-git', 'one-git-fork')
         self.r2_id = repo2.repo_id
 
         # add two extra commit into fork
@@ -104,7 +104,7 @@
         rev1 = 'master'
         rev2 = 'master'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo1.repo_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -131,9 +131,9 @@
     def test_compare_forks_on_branch_extra_commits_origin_has_incoming_hg(self):
         self.log_user()
 
-        repo1 = fixture.create_repo(u'one', repo_type='hg',
+        repo1 = fixture.create_repo('one', repo_type='hg',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
 
         self.r1_id = repo1.repo_id
 
@@ -143,7 +143,7 @@
                 parent=None, newfile=True)
 
         # fork this repo
-        repo2 = fixture.create_fork(u'one', u'one-fork')
+        repo2 = fixture.create_fork('one', 'one-fork')
         self.r2_id = repo2.repo_id
 
         # now commit something to origin repo
@@ -163,7 +163,7 @@
         rev1 = 'default'
         rev2 = 'default'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo1.repo_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -190,9 +190,9 @@
     def test_compare_forks_on_branch_extra_commits_origin_has_incoming_git(self):
         self.log_user()
 
-        repo1 = fixture.create_repo(u'one-git', repo_type='git',
+        repo1 = fixture.create_repo('one-git', repo_type='git',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
 
         self.r1_id = repo1.repo_id
 
@@ -202,7 +202,7 @@
                 parent=None, newfile=True)
 
         # fork this repo
-        repo2 = fixture.create_fork(u'one-git', u'one-git-fork')
+        repo2 = fixture.create_fork('one-git', 'one-git-fork')
         self.r2_id = repo2.repo_id
 
         # now commit something to origin repo
@@ -222,7 +222,7 @@
         rev1 = 'master'
         rev2 = 'master'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo1.repo_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -261,9 +261,9 @@
         # make repo1, and cs1+cs2
         self.log_user()
 
-        repo1 = fixture.create_repo(u'repo1', repo_type='hg',
+        repo1 = fixture.create_repo('repo1', repo_type='hg',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         self.r1_id = repo1.repo_id
 
         # commit something !
@@ -274,7 +274,7 @@
                 content='line1\nline2\n', message='commit2', vcs_type='hg',
                 parent=cs0)
         # fork this repo
-        repo2 = fixture.create_fork(u'repo1', u'repo1-fork')
+        repo2 = fixture.create_fork('repo1', 'repo1-fork')
         self.r2_id = repo2.repo_id
         # now make cs3-6
         cs2 = fixture.commit_change(repo1.repo_name, filename='file1',
@@ -290,7 +290,7 @@
                 content='line1\nline2\nline3\nline4\nline5\nline6\n',
                 message='commit6', vcs_type='hg', parent=cs4)
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo2.repo_name,
                                     org_ref_type="rev",
                                     org_ref_name=cs1.short_id,  # parent of cs2, in repo2
@@ -329,9 +329,9 @@
 #
         # make repo1, and cs1+cs2
         self.log_user()
-        repo1 = fixture.create_repo(u'repo1', repo_type='hg',
+        repo1 = fixture.create_repo('repo1', repo_type='hg',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         self.r1_id = repo1.repo_id
 
         # commit something !
@@ -342,7 +342,7 @@
                 content='line1\nline2\n', message='commit2', vcs_type='hg',
                 parent=cs0)
         # fork this repo
-        repo2 = fixture.create_fork(u'repo1', u'repo1-fork')
+        repo2 = fixture.create_fork('repo1', 'repo1-fork')
         self.r2_id = repo2.repo_id
         # now make cs3-6
         cs2 = fixture.commit_change(repo1.repo_name, filename='file1',
@@ -358,7 +358,7 @@
                 content='line1\nline2\nline3\nline4\nline5\nline6\n',
                 message='commit6', vcs_type='hg', parent=cs4)
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=repo1.repo_name,
                                     org_ref_type="rev",
                                     org_ref_name=cs2.short_id, # parent of cs3, not in repo2
@@ -388,27 +388,27 @@
     def test_compare_remote_branches_hg(self):
         self.log_user()
 
-        repo2 = fixture.create_fork(HG_REPO, HG_FORK)
+        repo2 = fixture.create_fork(base.HG_REPO, base.HG_FORK)
         self.r2_id = repo2.repo_id
         rev1 = '56349e29c2af'
         rev2 = '7d4bc8ec6be5'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.HG_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
                                     other_ref_name=rev2,
-                                    other_repo=HG_FORK,
+                                    other_repo=base.HG_FORK,
                                     merge='1',))
 
-        response.mustcontain('%s@%s' % (HG_REPO, rev1))
-        response.mustcontain('%s@%s' % (HG_FORK, rev2))
+        response.mustcontain('%s@%s' % (base.HG_REPO, rev1))
+        response.mustcontain('%s@%s' % (base.HG_FORK, rev2))
         ## outgoing changesets between those revisions
 
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/2dda4e345facb0ccff1a191052dd1606dba6781d">r4:2dda4e345fac</a>""" % (HG_FORK))
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/6fff84722075f1607a30f436523403845f84cd9e">r5:6fff84722075</a>""" % (HG_FORK))
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7">r6:%s</a>""" % (HG_FORK, rev2))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/2dda4e345facb0ccff1a191052dd1606dba6781d">r4:2dda4e345fac</a>""" % (base.HG_FORK))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/6fff84722075f1607a30f436523403845f84cd9e">r5:6fff84722075</a>""" % (base.HG_FORK))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7">r6:%s</a>""" % (base.HG_FORK, rev2))
 
         ## files
         response.mustcontain("""<a href="#C--9c390eb52cd6">vcs/backends/hg.py</a>""")
@@ -418,27 +418,27 @@
     def test_compare_remote_branches_git(self):
         self.log_user()
 
-        repo2 = fixture.create_fork(GIT_REPO, GIT_FORK)
+        repo2 = fixture.create_fork(base.GIT_REPO, base.GIT_FORK)
         self.r2_id = repo2.repo_id
         rev1 = '102607b09cdd60e2793929c4f90478be29f85a17'
         rev2 = 'd7e0d30fbcae12c90680eb095a4f5f02505ce501'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.GIT_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
                                     other_ref_name=rev2,
-                                    other_repo=GIT_FORK,
+                                    other_repo=base.GIT_FORK,
                                     merge='1',))
 
-        response.mustcontain('%s@%s' % (GIT_REPO, rev1))
-        response.mustcontain('%s@%s' % (GIT_FORK, rev2))
+        response.mustcontain('%s@%s' % (base.GIT_REPO, rev1))
+        response.mustcontain('%s@%s' % (base.GIT_FORK, rev2))
         ## outgoing changesets between those revisions
 
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/49d3fd156b6f7db46313fac355dca1a0b94a0017">r4:49d3fd156b6f</a>""" % (GIT_FORK))
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/2d1028c054665b962fa3d307adfc923ddd528038">r5:2d1028c05466</a>""" % (GIT_FORK))
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/d7e0d30fbcae12c90680eb095a4f5f02505ce501">r6:%s</a>""" % (GIT_FORK, rev2[:12]))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/49d3fd156b6f7db46313fac355dca1a0b94a0017">r4:49d3fd156b6f</a>""" % (base.GIT_FORK))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/2d1028c054665b962fa3d307adfc923ddd528038">r5:2d1028c05466</a>""" % (base.GIT_FORK))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/d7e0d30fbcae12c90680eb095a4f5f02505ce501">r6:%s</a>""" % (base.GIT_FORK, rev2[:12]))
 
         ## files
         response.mustcontain("""<a href="#C--9c390eb52cd6">vcs/backends/hg.py</a>""")
@@ -448,9 +448,9 @@
     def test_org_repo_new_commits_after_forking_simple_diff_hg(self):
         self.log_user()
 
-        repo1 = fixture.create_repo(u'one', repo_type='hg',
+        repo1 = fixture.create_repo('one', repo_type='hg',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
 
         self.r1_id = repo1.repo_id
         r1_name = repo1.repo_name
@@ -460,8 +460,8 @@
         Session().commit()
         assert repo1.scm_instance.revisions == [cs0.raw_id]
         # fork the repo1
-        repo2 = fixture.create_fork(r1_name, u'one-fork',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+        repo2 = fixture.create_fork(r1_name, 'one-fork',
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
         assert repo2.scm_instance.revisions == [cs0.raw_id]
         self.r2_id = repo2.repo_id
@@ -482,7 +482,7 @@
         rev1 = 'default'
         rev2 = 'default'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=r2_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -500,7 +500,7 @@
         # compare !
         rev1 = 'default'
         rev2 = 'default'
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=r2_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev2,
@@ -520,9 +520,9 @@
     def test_org_repo_new_commits_after_forking_simple_diff_git(self):
         self.log_user()
 
-        repo1 = fixture.create_repo(u'one-git', repo_type='git',
+        repo1 = fixture.create_repo('one-git', repo_type='git',
                                     repo_description='diff-test',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
 
         self.r1_id = repo1.repo_id
         r1_name = repo1.repo_name
@@ -533,8 +533,8 @@
         Session().commit()
         assert repo1.scm_instance.revisions == [cs0.raw_id]
         # fork the repo1
-        repo2 = fixture.create_fork(r1_name, u'one-git-fork',
-                                    cur_user=TEST_USER_ADMIN_LOGIN)
+        repo2 = fixture.create_fork(r1_name, 'one-git-fork',
+                                    cur_user=base.TEST_USER_ADMIN_LOGIN)
         Session().commit()
         assert repo2.scm_instance.revisions == [cs0.raw_id]
         self.r2_id = repo2.repo_id
@@ -556,7 +556,7 @@
         rev1 = 'master'
         rev2 = 'master'
 
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=r2_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev1,
@@ -574,7 +574,7 @@
         # compare !
         rev1 = 'master'
         rev2 = 'master'
-        response = self.app.get(url('compare_url',
+        response = self.app.get(base.url('compare_url',
                                     repo_name=r2_name,
                                     org_ref_type="branch",
                                     org_ref_name=rev1,
--- a/kallithea/tests/functional/test_compare_local.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_compare_local.py	Sat May 02 21:20:43 2020 +0200
@@ -1,31 +1,31 @@
 # -*- coding: utf-8 -*-
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestCompareController(TestController):
+class TestCompareController(base.TestController):
 
     def test_compare_tag_hg(self):
         self.log_user()
         tag1 = 'v0.1.2'
         tag2 = 'v0.1.3'
-        response = self.app.get(url('compare_url',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.HG_REPO,
                                     org_ref_type="tag",
                                     org_ref_name=tag1,
                                     other_ref_type="tag",
                                     other_ref_name=tag2,
                                     ), status=200)
-        response.mustcontain('%s@%s' % (HG_REPO, tag1))
-        response.mustcontain('%s@%s' % (HG_REPO, tag2))
+        response.mustcontain('%s@%s' % (base.HG_REPO, tag1))
+        response.mustcontain('%s@%s' % (base.HG_REPO, tag2))
 
         ## outgoing changesets between tags
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/c5ddebc06eaaba3010c2d66ea6ec9d074eb0f678">r112:c5ddebc06eaa</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/70d4cef8a37657ee4cf5aabb3bd9f68879769816">r115:70d4cef8a376</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/9749bfbfc0d2eba208d7947de266303b67c87cda">r116:9749bfbfc0d2</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/41fda979f02fda216374bf8edac4e83f69e7581c">r117:41fda979f02f</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/bb1a3ab98cc45cb934a77dcabf87a5a598b59e97">r118:bb1a3ab98cc4</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/36e0fc9d2808c5022a24f49d6658330383ed8666">r119:36e0fc9d2808</a>''' % HG_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/17544fbfcd33ffb439e2b728b5d526b1ef30bfcf">r120:17544fbfcd33</a>''' % HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/c5ddebc06eaaba3010c2d66ea6ec9d074eb0f678">r112:c5ddebc06eaa</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/70d4cef8a37657ee4cf5aabb3bd9f68879769816">r115:70d4cef8a376</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/9749bfbfc0d2eba208d7947de266303b67c87cda">r116:9749bfbfc0d2</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/41fda979f02fda216374bf8edac4e83f69e7581c">r117:41fda979f02f</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/bb1a3ab98cc45cb934a77dcabf87a5a598b59e97">r118:bb1a3ab98cc4</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/36e0fc9d2808c5022a24f49d6658330383ed8666">r119:36e0fc9d2808</a>''' % base.HG_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/17544fbfcd33ffb439e2b728b5d526b1ef30bfcf">r120:17544fbfcd33</a>''' % base.HG_REPO)
 
         response.mustcontain('11 files changed with 94 insertions and 64 deletions')
 
@@ -80,24 +80,24 @@
         self.log_user()
         tag1 = 'v0.1.2'
         tag2 = 'v0.1.3'
-        response = self.app.get(url('compare_url',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.GIT_REPO,
                                     org_ref_type="tag",
                                     org_ref_name=tag1,
                                     other_ref_type="tag",
                                     other_ref_name=tag2,
                                     ), status=200)
-        response.mustcontain('%s@%s' % (GIT_REPO, tag1))
-        response.mustcontain('%s@%s' % (GIT_REPO, tag2))
+        response.mustcontain('%s@%s' % (base.GIT_REPO, tag1))
+        response.mustcontain('%s@%s' % (base.GIT_REPO, tag2))
 
         ## outgoing changesets between tags
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/794bbdd31545c199f74912709ea350dedcd189a2">r113:794bbdd31545</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/e36d8c5025329bdd4212bd53d4ed8a70ff44985f">r115:e36d8c502532</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5c9ff4f6d7508db0e72b1d2991c357d0d8e07af2">r116:5c9ff4f6d750</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/b7187fa2b8c1d773ec35e9dee12f01f74808c879">r117:b7187fa2b8c1</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5f3b74262014a8de2dc7dade1152de9fd0c8efef">r118:5f3b74262014</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/17438a11f72b93f56d0e08e7d1fa79a378578a82">r119:17438a11f72b</a>''' % GIT_REPO)
-        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5a3a8fb005554692b16e21dee62bf02667d8dc3e">r120:5a3a8fb00555</a>''' % GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/794bbdd31545c199f74912709ea350dedcd189a2">r113:794bbdd31545</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/e36d8c5025329bdd4212bd53d4ed8a70ff44985f">r115:e36d8c502532</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5c9ff4f6d7508db0e72b1d2991c357d0d8e07af2">r116:5c9ff4f6d750</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/b7187fa2b8c1d773ec35e9dee12f01f74808c879">r117:b7187fa2b8c1</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5f3b74262014a8de2dc7dade1152de9fd0c8efef">r118:5f3b74262014</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/17438a11f72b93f56d0e08e7d1fa79a378578a82">r119:17438a11f72b</a>''' % base.GIT_REPO)
+        response.mustcontain('''<a class="changeset_hash" href="/%s/changeset/5a3a8fb005554692b16e21dee62bf02667d8dc3e">r120:5a3a8fb00555</a>''' % base.GIT_REPO)
 
         response.mustcontain('11 files changed with 94 insertions and 64 deletions')
 
@@ -116,32 +116,32 @@
 
     def test_index_branch_hg(self):
         self.log_user()
-        response = self.app.get(url('compare_url',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.HG_REPO,
                                     org_ref_type="branch",
                                     org_ref_name='default',
                                     other_ref_type="branch",
                                     other_ref_name='default',
                                     ))
 
-        response.mustcontain('%s@default' % (HG_REPO))
-        response.mustcontain('%s@default' % (HG_REPO))
+        response.mustcontain('%s@default' % (base.HG_REPO))
+        response.mustcontain('%s@default' % (base.HG_REPO))
         # branch are equal
         response.mustcontain('<span class="text-muted">No files</span>')
         response.mustcontain('<span class="text-muted">No changesets</span>')
 
     def test_index_branch_git(self):
         self.log_user()
-        response = self.app.get(url('compare_url',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.GIT_REPO,
                                     org_ref_type="branch",
                                     org_ref_name='master',
                                     other_ref_type="branch",
                                     other_ref_name='master',
                                     ))
 
-        response.mustcontain('%s@master' % (GIT_REPO))
-        response.mustcontain('%s@master' % (GIT_REPO))
+        response.mustcontain('%s@master' % (base.GIT_REPO))
+        response.mustcontain('%s@master' % (base.GIT_REPO))
         # branch are equal
         response.mustcontain('<span class="text-muted">No files</span>')
         response.mustcontain('<span class="text-muted">No changesets</span>')
@@ -151,18 +151,18 @@
         rev1 = 'b986218ba1c9'
         rev2 = '3d8f361e72ab'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.HG_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
                                     other_ref_name=rev2,
                                     ))
-        response.mustcontain('%s@%s' % (HG_REPO, rev1))
-        response.mustcontain('%s@%s' % (HG_REPO, rev2))
+        response.mustcontain('%s@%s' % (base.HG_REPO, rev1))
+        response.mustcontain('%s@%s' % (base.HG_REPO, rev2))
 
         ## outgoing changesets between those revisions
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/3d8f361e72ab303da48d799ff1ac40d5ac37c67e">r1:%s</a>""" % (HG_REPO, rev2))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/3d8f361e72ab303da48d799ff1ac40d5ac37c67e">r1:%s</a>""" % (base.HG_REPO, rev2))
 
         response.mustcontain('1 file changed with 7 insertions and 0 deletions')
         ## files
@@ -173,18 +173,18 @@
         rev1 = 'c1214f7e79e02fc37156ff215cd71275450cffc3'
         rev2 = '38b5fe81f109cb111f549bfe9bb6b267e10bc557'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.GIT_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
                                     other_ref_name=rev2,
                                     ))
-        response.mustcontain('%s@%s' % (GIT_REPO, rev1))
-        response.mustcontain('%s@%s' % (GIT_REPO, rev2))
+        response.mustcontain('%s@%s' % (base.GIT_REPO, rev1))
+        response.mustcontain('%s@%s' % (base.GIT_REPO, rev2))
 
         ## outgoing changesets between those revisions
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/38b5fe81f109cb111f549bfe9bb6b267e10bc557">r1:%s</a>""" % (GIT_REPO, rev2[:12]))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/38b5fe81f109cb111f549bfe9bb6b267e10bc557">r1:%s</a>""" % (base.GIT_REPO, rev2[:12]))
         response.mustcontain('1 file changed with 7 insertions and 0 deletions')
 
         ## files
@@ -195,8 +195,8 @@
         rev1 = 'b986218ba1c9'
         rev2 = '3d8f361e72ab'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.HG_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
@@ -206,18 +206,18 @@
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
 
         ## outgoing changesets between those revisions
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/3d8f361e72ab303da48d799ff1ac40d5ac37c67e">r1:%s</a>""" % (HG_REPO, rev2))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/3d8f361e72ab303da48d799ff1ac40d5ac37c67e">r1:%s</a>""" % (base.HG_REPO, rev2))
 
         response.mustcontain('Merge Ancestor')
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/b986218ba1c9b0d6a259fac9b050b1724ed8e545">%s</a>""" % (HG_REPO, rev1))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/b986218ba1c9b0d6a259fac9b050b1724ed8e545">%s</a>""" % (base.HG_REPO, rev1))
 
     def test_compare_revisions_git_is_ajax_preview(self):
         self.log_user()
         rev1 = 'c1214f7e79e02fc37156ff215cd71275450cffc3'
         rev2 = '38b5fe81f109cb111f549bfe9bb6b267e10bc557'
 
-        response = self.app.get(url('compare_url',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('compare_url',
+                                    repo_name=base.GIT_REPO,
                                     org_ref_type="rev",
                                     org_ref_name=rev1,
                                     other_ref_type="rev",
@@ -226,7 +226,7 @@
                                     ),
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
         ## outgoing changesets between those revisions
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/38b5fe81f109cb111f549bfe9bb6b267e10bc557">r1:%s</a>""" % (GIT_REPO, rev2[:12]))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/38b5fe81f109cb111f549bfe9bb6b267e10bc557">r1:%s</a>""" % (base.GIT_REPO, rev2[:12]))
 
         response.mustcontain('Merge Ancestor')
-        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/c1214f7e79e02fc37156ff215cd71275450cffc3">%s</a>""" % (GIT_REPO, rev1[:12]))
+        response.mustcontain("""<a class="changeset_hash" href="/%s/changeset/c1214f7e79e02fc37156ff215cd71275450cffc3">%s</a>""" % (base.GIT_REPO, rev1[:12]))
--- a/kallithea/tests/functional/test_feed.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_feed.py	Sat May 02 21:20:43 2020 +0200
@@ -1,20 +1,20 @@
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestFeedController(TestController):
+class TestFeedController(base.TestController):
 
     def test_rss(self):
         self.log_user()
-        response = self.app.get(url(controller='feed', action='rss',
-                                    repo_name=HG_REPO))
+        response = self.app.get(base.url(controller='feed', action='rss',
+                                    repo_name=base.HG_REPO))
 
         assert response.content_type == "application/rss+xml"
         assert """<rss version="2.0">""" in response
 
     def test_atom(self):
         self.log_user()
-        response = self.app.get(url(controller='feed', action='atom',
-                                    repo_name=HG_REPO))
+        response = self.app.get(base.url(controller='feed', action='atom',
+                                    repo_name=base.HG_REPO))
 
         assert response.content_type == """application/atom+xml"""
         assert """<?xml version="1.0" encoding="utf-8"?>""" in response
--- a/kallithea/tests/functional/test_files.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_files.py	Sat May 02 21:20:43 2020 +0200
@@ -1,10 +1,11 @@
 # -*- coding: utf-8 -*-
+import json
 import mimetypes
 import posixpath
 
 from kallithea.model.db import Repository
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -26,51 +27,51 @@
     Session().commit()
 
 
-class TestFilesController(TestController):
+class TestFilesController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='index',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.HG_REPO,
                                     revision='tip',
                                     f_path='/'))
         # Test response...
-        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/vcs"><i class="icon-folder-open"></i><span>vcs</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.gitignore"><i class="icon-doc"></i><span>.gitignore</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgignore"><i class="icon-doc"></i><span>.hgignore</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgtags"><i class="icon-doc"></i><span>.hgtags</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.travis.yml"><i class="icon-doc"></i><span>.travis.yml</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/MANIFEST.in"><i class="icon-doc"></i><span>MANIFEST.in</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/run_test_and_report.sh"><i class="icon-doc"></i><span>run_test_and_report.sh</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.cfg"><i class="icon-doc"></i><span>setup.cfg</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.py"><i class="icon-doc"></i><span>setup.py</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/test_and_report.sh"><i class="icon-doc"></i><span>test_and_report.sh</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/tox.ini"><i class="icon-doc"></i><span>tox.ini</span></a>' % HG_REPO)
+        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/vcs"><i class="icon-folder-open"></i><span>vcs</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.gitignore"><i class="icon-doc"></i><span>.gitignore</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgignore"><i class="icon-doc"></i><span>.hgignore</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgtags"><i class="icon-doc"></i><span>.hgtags</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.travis.yml"><i class="icon-doc"></i><span>.travis.yml</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/MANIFEST.in"><i class="icon-doc"></i><span>MANIFEST.in</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/run_test_and_report.sh"><i class="icon-doc"></i><span>run_test_and_report.sh</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.cfg"><i class="icon-doc"></i><span>setup.cfg</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.py"><i class="icon-doc"></i><span>setup.py</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/test_and_report.sh"><i class="icon-doc"></i><span>test_and_report.sh</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/tox.ini"><i class="icon-doc"></i><span>tox.ini</span></a>' % base.HG_REPO)
 
     def test_index_revision(self):
         self.log_user()
 
         response = self.app.get(
-            url(controller='files', action='index',
-                repo_name=HG_REPO,
+            base.url(controller='files', action='index',
+                repo_name=base.HG_REPO,
                 revision='7ba66bec8d6dbba14a2155be32408c435c5f4492',
                 f_path='/')
         )
 
         # Test response...
 
-        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/tests"><i class="icon-folder-open"></i><span>tests</span></a>' % HG_REPO)
-        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % HG_REPO)
+        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/tests"><i class="icon-folder-open"></i><span>tests</span></a>' % base.HG_REPO)
+        response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % base.HG_REPO)
         response.mustcontain('1.1 KiB')
 
     def test_index_different_branch(self):
         self.log_user()
 
-        response = self.app.get(url(controller='files', action='index',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.HG_REPO,
                                     revision='97e8b885c04894463c51898e14387d80c30ed1ee',
                                     f_path='/'))
 
@@ -85,8 +86,8 @@
                   (1, '3d8f361e72ab303da48d799ff1ac40d5ac37c67e'),
                   (0, 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]:
 
-            response = self.app.get(url(controller='files', action='index',
-                                    repo_name=HG_REPO,
+            response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.HG_REPO,
                                     revision=r[1],
                                     f_path='/'))
 
@@ -98,8 +99,8 @@
         import kallithea.lib.helpers
         kallithea.lib.helpers._urlify_issues_f = None
         self.log_user()
-        response = self.app.get(url(controller='files', action='index',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.HG_REPO,
                                     revision='8911406ad776fdd3d0b9932a2e89677e57405a48',
                                     f_path='vcs/nodes.py'))
 
@@ -114,93 +115,93 @@
 
     def test_file_source_history(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='history',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='history',
+                                    repo_name=base.HG_REPO,
                                     revision='tip',
                                     f_path='vcs/nodes.py'),
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
-        assert response.body == HG_NODE_HISTORY
+        assert json.loads(response.body) == json.loads(HG_NODE_HISTORY)
 
     def test_file_source_history_git(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='history',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url(controller='files', action='history',
+                                    repo_name=base.GIT_REPO,
                                     revision='master',
                                     f_path='vcs/nodes.py'),
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
-        assert response.body == GIT_NODE_HISTORY
+        assert json.loads(response.body) == json.loads(GIT_NODE_HISTORY)
 
     def test_file_annotation(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='index',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.HG_REPO,
                                     revision='tip',
                                     f_path='vcs/nodes.py',
-                                    annotate=True))
+                                    annotate='1'))
 
         response.mustcontain("""r356:25213a5fbb04""")
 
     def test_file_annotation_git(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='index',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url(controller='files', action='index',
+                                    repo_name=base.GIT_REPO,
                                     revision='master',
                                     f_path='vcs/nodes.py',
-                                    annotate=True))
+                                    annotate='1'))
         response.mustcontain("""r345:c994f0de03b2""")
 
     def test_file_annotation_history(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='history',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='history',
+                                    repo_name=base.HG_REPO,
                                     revision='tip',
                                     f_path='vcs/nodes.py',
-                                    annotate=True),
+                                    annotate='1'),
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
-        assert response.body == HG_NODE_HISTORY
+        assert json.loads(response.body) == json.loads(HG_NODE_HISTORY)
 
     def test_file_annotation_history_git(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='history',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url(controller='files', action='history',
+                                    repo_name=base.GIT_REPO,
                                     revision='master',
                                     f_path='vcs/nodes.py',
                                     annotate=True),
                                 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
-        assert response.body == GIT_NODE_HISTORY
+        assert json.loads(response.body) == json.loads(GIT_NODE_HISTORY)
 
     def test_file_authors(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='authors',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='authors',
+                                    repo_name=base.HG_REPO,
                                     revision='tip',
                                     f_path='vcs/nodes.py',
-                                    annotate=True))
+                                    annotate='1'))
         response.mustcontain('Marcin Kuzminski')
         response.mustcontain('Lukasz Balcerzak')
 
     def test_file_authors_git(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='authors',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url(controller='files', action='authors',
+                                    repo_name=base.GIT_REPO,
                                     revision='master',
                                     f_path='vcs/nodes.py',
-                                    annotate=True))
+                                    annotate='1'))
         response.mustcontain('Marcin Kuzminski')
         response.mustcontain('Lukasz Balcerzak')
 
     def test_archival(self):
         self.log_user()
-        _set_downloads(HG_REPO, set_to=True)
+        _set_downloads(base.HG_REPO, set_to=True)
         for arch_ext, info in ARCHIVE_SPECS.items():
             short = '27cd5cce30c9%s' % arch_ext
             fname = '27cd5cce30c96924232dffcd24178a07ffeb5dfc%s' % arch_ext
-            filename = '%s-%s' % (HG_REPO, short)
-            response = self.app.get(url(controller='files',
+            filename = '%s-%s' % (base.HG_REPO, short)
+            response = self.app.get(base.url(controller='files',
                                         action='archivefile',
-                                        repo_name=HG_REPO,
+                                        repo_name=base.HG_REPO,
                                         fname=fname))
 
             assert response.status == '200 OK'
@@ -210,29 +211,29 @@
                 ('Content-Disposition', 'attachment; filename=%s' % filename),
                 ('Content-Type', info[0]),
             ]
-            assert response.response._headers.items() == heads
+            assert sorted(response.response._headers.items()) == sorted(heads)
 
     def test_archival_wrong_ext(self):
         self.log_user()
-        _set_downloads(HG_REPO, set_to=True)
+        _set_downloads(base.HG_REPO, set_to=True)
         for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']:
             fname = '27cd5cce30c96924232dffcd24178a07ffeb5dfc%s' % arch_ext
 
-            response = self.app.get(url(controller='files',
+            response = self.app.get(base.url(controller='files',
                                         action='archivefile',
-                                        repo_name=HG_REPO,
+                                        repo_name=base.HG_REPO,
                                         fname=fname))
             response.mustcontain('Unknown archive type')
 
     def test_archival_wrong_revision(self):
         self.log_user()
-        _set_downloads(HG_REPO, set_to=True)
+        _set_downloads(base.HG_REPO, set_to=True)
         for rev in ['00x000000', 'tar', 'wrong', '@##$@$42413232', '232dffcd']:
             fname = '%s.zip' % rev
 
-            response = self.app.get(url(controller='files',
+            response = self.app.get(base.url(controller='files',
                                         action='archivefile',
-                                        repo_name=HG_REPO,
+                                        repo_name=base.HG_REPO,
                                         fname=fname))
             response.mustcontain('Unknown revision')
 
@@ -241,8 +242,8 @@
     #==========================================================================
     def test_raw_file_ok(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='rawfile',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='rawfile',
+                                    repo_name=base.HG_REPO,
                                     revision='27cd5cce30c96924232dffcd24178a07ffeb5dfc',
                                     f_path='vcs/nodes.py'))
 
@@ -251,11 +252,11 @@
 
     def test_raw_file_wrong_cs(self):
         self.log_user()
-        rev = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
+        rev = 'ERRORce30c96924232dffcd24178a07ffeb5dfc'
         f_path = 'vcs/nodes.py'
 
-        response = self.app.get(url(controller='files', action='rawfile',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='rawfile',
+                                    repo_name=base.HG_REPO,
                                     revision=rev,
                                     f_path=f_path), status=404)
 
@@ -266,12 +267,12 @@
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
         f_path = 'vcs/ERRORnodes.py'
-        response = self.app.get(url(controller='files', action='rawfile',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='rawfile',
+                                    repo_name=base.HG_REPO,
                                     revision=rev,
                                     f_path=f_path), status=404)
 
-        msg = "There is no file nor directory at the given path: &#39;%s&#39; at revision %s" % (f_path, rev[:12])
+        msg = "There is no file nor directory at the given path: &apos;%s&apos; at revision %s" % (f_path, rev[:12])
         response.mustcontain(msg)
 
     #==========================================================================
@@ -279,8 +280,8 @@
     #==========================================================================
     def test_raw_ok(self):
         self.log_user()
-        response = self.app.get(url(controller='files', action='raw',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='raw',
+                                    repo_name=base.HG_REPO,
                                     revision='27cd5cce30c96924232dffcd24178a07ffeb5dfc',
                                     f_path='vcs/nodes.py'))
 
@@ -288,11 +289,11 @@
 
     def test_raw_wrong_cs(self):
         self.log_user()
-        rev = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
+        rev = 'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
         f_path = 'vcs/nodes.py'
 
-        response = self.app.get(url(controller='files', action='raw',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='raw',
+                                    repo_name=base.HG_REPO,
                                     revision=rev,
                                     f_path=f_path), status=404)
 
@@ -303,18 +304,18 @@
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
         f_path = 'vcs/ERRORnodes.py'
-        response = self.app.get(url(controller='files', action='raw',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url(controller='files', action='raw',
+                                    repo_name=base.HG_REPO,
                                     revision=rev,
                                     f_path=f_path), status=404)
-        msg = "There is no file nor directory at the given path: &#39;%s&#39; at revision %s" % (f_path, rev[:12])
+        msg = "There is no file nor directory at the given path: &apos;%s&apos; at revision %s" % (f_path, rev[:12])
         response.mustcontain(msg)
 
     def test_ajaxed_files_list(self):
         self.log_user()
         rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
         response = self.app.get(
-            url('files_nodelist_home', repo_name=HG_REPO, f_path='/',
+            base.url('files_nodelist_home', repo_name=base.HG_REPO, f_path='/',
                 revision=rev),
             extra_environ={'HTTP_X_PARTIAL_XHR': '1'},
         )
@@ -323,14 +324,14 @@
     # Hg - ADD FILE
     def test_add_file_view_hg(self):
         self.log_user()
-        response = self.app.get(url('files_add_home',
-                                      repo_name=HG_REPO,
+        response = self.app.get(base.url('files_add_home',
+                                      repo_name=base.HG_REPO,
                                       revision='tip', f_path='/'))
 
     def test_add_file_into_hg_missing_content(self):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=HG_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.HG_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                     'content': '',
@@ -342,8 +343,8 @@
 
     def test_add_file_into_hg_missing_filename(self):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=HG_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.HG_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                     'content': "foo",
@@ -353,15 +354,15 @@
 
         self.checkSessionFlash(response, 'No filename')
 
-    @parametrize('location,filename', [
+    @base.parametrize('location,filename', [
         ('/abs', 'foo'),
         ('../rel', 'foo'),
         ('file/../foo', 'foo'),
     ])
     def test_add_file_into_hg_bad_filenames(self, location, filename):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=HG_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.HG_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                     'content': "foo",
@@ -373,15 +374,15 @@
 
         self.checkSessionFlash(response, 'Location must be relative path and must not contain .. in path')
 
-    @parametrize('cnt,location,filename', [
+    @base.parametrize('cnt,location,filename', [
         (1, '', 'foo.txt'),
         (2, 'dir', 'foo.rst'),
         (3, 'rel/dir', 'foo.bar'),
     ])
     def test_add_file_into_hg(self, cnt, location, filename):
         self.log_user()
-        repo = fixture.create_repo(u'commit-test-%s' % cnt, repo_type='hg')
-        response = self.app.post(url('files_add_home',
+        repo = fixture.create_repo('commit-test-%s' % cnt, repo_type='hg')
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -400,14 +401,14 @@
     # Git - add file
     def test_add_file_view_git(self):
         self.log_user()
-        response = self.app.get(url('files_add_home',
-                                      repo_name=GIT_REPO,
+        response = self.app.get(base.url('files_add_home',
+                                      repo_name=base.GIT_REPO,
                                       revision='tip', f_path='/'))
 
     def test_add_file_into_git_missing_content(self):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=GIT_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.GIT_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                      'content': '',
@@ -418,8 +419,8 @@
 
     def test_add_file_into_git_missing_filename(self):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=GIT_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.GIT_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                     'content': "foo",
@@ -429,15 +430,15 @@
 
         self.checkSessionFlash(response, 'No filename')
 
-    @parametrize('location,filename', [
+    @base.parametrize('location,filename', [
         ('/abs', 'foo'),
         ('../rel', 'foo'),
         ('file/../foo', 'foo'),
     ])
     def test_add_file_into_git_bad_filenames(self, location, filename):
         self.log_user()
-        response = self.app.post(url('files_add_home',
-                                      repo_name=GIT_REPO,
+        response = self.app.post(base.url('files_add_home',
+                                      repo_name=base.GIT_REPO,
                                       revision='tip', f_path='/'),
                                  params={
                                     'content': "foo",
@@ -449,15 +450,15 @@
 
         self.checkSessionFlash(response, 'Location must be relative path and must not contain .. in path')
 
-    @parametrize('cnt,location,filename', [
+    @base.parametrize('cnt,location,filename', [
         (1, '', 'foo.txt'),
         (2, 'dir', 'foo.rst'),
         (3, 'rel/dir', 'foo.bar'),
     ])
     def test_add_file_into_git(self, cnt, location, filename):
         self.log_user()
-        repo = fixture.create_repo(u'commit-test-%s' % cnt, repo_type='git')
-        response = self.app.post(url('files_add_home',
+        repo = fixture.create_repo('commit-test-%s' % cnt, repo_type='git')
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -476,18 +477,27 @@
     # Hg - EDIT
     def test_edit_file_view_hg(self):
         self.log_user()
-        response = self.app.get(url('files_edit_home',
-                                      repo_name=HG_REPO,
+        response = self.app.get(base.url('files_edit_home',
+                                      repo_name=base.HG_REPO,
                                       revision='tip', f_path='vcs/nodes.py'))
+        # Odd error when on tip ...
+        self.checkSessionFlash(response, "You can only edit files with revision being a valid branch")
+        assert b"Commit Message" not in response.body
+
+        # Specify branch head revision to avoid "valid branch" error and get coverage of edit form
+        response = self.app.get(base.url('files_edit_home',
+                                      repo_name=base.HG_REPO,
+                                      revision='96507bd11ecc815ebc6270fdf6db110928c09c1e', f_path='vcs/nodes.py'))
+        assert b"Commit Message" in response.body
 
     def test_edit_file_view_not_on_branch_hg(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-edit-repo', repo_type='hg')
+        repo = fixture.create_repo('test-edit-repo', repo_type='hg')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -501,7 +511,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.get(url('files_edit_home',
+            response = self.app.get(base.url('files_edit_home',
                                           repo_name=repo.repo_name,
                                           revision='tip', f_path=posixpath.join(location, filename)),
                                     status=302)
@@ -512,12 +522,12 @@
 
     def test_edit_file_view_commit_changes_hg(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-edit-repo', repo_type='hg')
+        repo = fixture.create_repo('test-edit-repo', repo_type='hg')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip',
                                       f_path='/'),
@@ -532,7 +542,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.post(url('files_edit_home',
+            response = self.app.post(base.url('files_edit_home',
                                           repo_name=repo.repo_name,
                                           revision=repo.scm_instance.DEFAULT_BRANCH_NAME,
                                           f_path=posixpath.join(location, filename)),
@@ -550,18 +560,18 @@
     # Git - edit
     def test_edit_file_view_git(self):
         self.log_user()
-        response = self.app.get(url('files_edit_home',
-                                      repo_name=GIT_REPO,
+        response = self.app.get(base.url('files_edit_home',
+                                      repo_name=base.GIT_REPO,
                                       revision='tip', f_path='vcs/nodes.py'))
 
     def test_edit_file_view_not_on_branch_git(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-edit-repo', repo_type='git')
+        repo = fixture.create_repo('test-edit-repo', repo_type='git')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -575,7 +585,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.get(url('files_edit_home',
+            response = self.app.get(base.url('files_edit_home',
                                           repo_name=repo.repo_name,
                                           revision='tip', f_path=posixpath.join(location, filename)),
                                     status=302)
@@ -586,12 +596,12 @@
 
     def test_edit_file_view_commit_changes_git(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-edit-repo', repo_type='git')
+        repo = fixture.create_repo('test-edit-repo', repo_type='git')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip',
                                       f_path='/'),
@@ -606,7 +616,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.post(url('files_edit_home',
+            response = self.app.post(base.url('files_edit_home',
                                           repo_name=repo.repo_name,
                                           revision=repo.scm_instance.DEFAULT_BRANCH_NAME,
                                           f_path=posixpath.join(location, filename)),
@@ -624,18 +634,18 @@
     # Hg - delete
     def test_delete_file_view_hg(self):
         self.log_user()
-        response = self.app.get(url('files_delete_home',
-                                     repo_name=HG_REPO,
+        response = self.app.get(base.url('files_delete_home',
+                                     repo_name=base.HG_REPO,
                                      revision='tip', f_path='vcs/nodes.py'))
 
     def test_delete_file_view_not_on_branch_hg(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-delete-repo', repo_type='hg')
+        repo = fixture.create_repo('test-delete-repo', repo_type='hg')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -649,7 +659,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.get(url('files_delete_home',
+            response = self.app.get(base.url('files_delete_home',
                                           repo_name=repo.repo_name,
                                           revision='tip', f_path=posixpath.join(location, filename)),
                                     status=302)
@@ -660,12 +670,12 @@
 
     def test_delete_file_view_commit_changes_hg(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-delete-repo', repo_type='hg')
+        repo = fixture.create_repo('test-delete-repo', repo_type='hg')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip',
                                       f_path='/'),
@@ -680,7 +690,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.post(url('files_delete_home',
+            response = self.app.post(base.url('files_delete_home',
                                           repo_name=repo.repo_name,
                                           revision=repo.scm_instance.DEFAULT_BRANCH_NAME,
                                           f_path=posixpath.join(location, filename)),
@@ -697,18 +707,18 @@
     # Git - delete
     def test_delete_file_view_git(self):
         self.log_user()
-        response = self.app.get(url('files_delete_home',
-                                     repo_name=HG_REPO,
+        response = self.app.get(base.url('files_delete_home',
+                                     repo_name=base.HG_REPO,
                                      revision='tip', f_path='vcs/nodes.py'))
 
     def test_delete_file_view_not_on_branch_git(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-delete-repo', repo_type='git')
+        repo = fixture.create_repo('test-delete-repo', repo_type='git')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip', f_path='/'),
                                  params={
@@ -722,7 +732,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.get(url('files_delete_home',
+            response = self.app.get(base.url('files_delete_home',
                                           repo_name=repo.repo_name,
                                           revision='tip', f_path=posixpath.join(location, filename)),
                                     status=302)
@@ -733,12 +743,12 @@
 
     def test_delete_file_view_commit_changes_git(self):
         self.log_user()
-        repo = fixture.create_repo(u'test-delete-repo', repo_type='git')
+        repo = fixture.create_repo('test-delete-repo', repo_type='git')
 
         ## add file
         location = 'vcs'
         filename = 'nodes.py'
-        response = self.app.post(url('files_add_home',
+        response = self.app.post(base.url('files_add_home',
                                       repo_name=repo.repo_name,
                                       revision='tip',
                                       f_path='/'),
@@ -753,7 +763,7 @@
         try:
             self.checkSessionFlash(response, 'Successfully committed to %s'
                                    % posixpath.join(location, filename))
-            response = self.app.post(url('files_delete_home',
+            response = self.app.post(base.url('files_delete_home',
                                           repo_name=repo.repo_name,
                                           revision=repo.scm_instance.DEFAULT_BRANCH_NAME,
                                           f_path=posixpath.join(location, filename)),
@@ -769,16 +779,16 @@
 
     def test_png_diff_no_crash_hg(self):
         self.log_user()
-        response = self.app.get(url('files_diff_home',
-                                    repo_name=HG_REPO,
+        response = self.app.get(base.url('files_diff_home',
+                                    repo_name=base.HG_REPO,
                                     f_path='docs/theme/ADC/static/documentation.png',
                                     diff1='tip', diff2='tip'))
         response.mustcontain("""<pre>Binary file</pre>""")
 
     def test_png_diff_no_crash_git(self):
         self.log_user()
-        response = self.app.get(url('files_diff_home',
-                                    repo_name=GIT_REPO,
+        response = self.app.get(base.url('files_diff_home',
+                                    repo_name=base.GIT_REPO,
                                     f_path='docs/theme/ADC/static/documentation.png',
                                     diff1='master', diff2='master'))
         response.mustcontain("""<pre>Binary file</pre>""")
--- a/kallithea/tests/functional/test_followers.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_followers.py	Sat May 02 21:20:43 2020 +0200
@@ -1,24 +1,24 @@
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestFollowersController(TestController):
+class TestFollowersController(base.TestController):
 
     def test_index_hg(self):
         self.log_user()
-        repo_name = HG_REPO
-        response = self.app.get(url(controller='followers',
+        repo_name = base.HG_REPO
+        response = self.app.get(base.url(controller='followers',
                                     action='followers',
                                     repo_name=repo_name))
 
-        response.mustcontain(TEST_USER_ADMIN_LOGIN)
+        response.mustcontain(base.TEST_USER_ADMIN_LOGIN)
         response.mustcontain("""Started following""")
 
     def test_index_git(self):
         self.log_user()
-        repo_name = GIT_REPO
-        response = self.app.get(url(controller='followers',
+        repo_name = base.GIT_REPO
+        response = self.app.get(base.url(controller='followers',
                                     action='followers',
                                     repo_name=repo_name))
 
-        response.mustcontain(TEST_USER_ADMIN_LOGIN)
+        response.mustcontain(base.TEST_USER_ADMIN_LOGIN)
         response.mustcontain("""Started following""")
--- a/kallithea/tests/functional/test_forks.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_forks.py	Sat May 02 21:20:43 2020 +0200
@@ -1,20 +1,19 @@
 # -*- coding: utf-8 -*-
 
-import urllib
+import urllib.parse
 
-from kallithea.lib.utils2 import safe_str, safe_unicode
 from kallithea.model.db import Repository, User
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class _BaseTestCase(TestController):
+class _BaseTestCase(base.TestController):
     """
     Write all tests here
     """
@@ -24,9 +23,9 @@
     REPO_FORK = None
 
     def setup_method(self, method):
-        self.username = u'forkuser'
-        self.password = u'qweqwe'
-        u1 = fixture.create_user(self.username, password=self.password, email=u'fork_king@example.com')
+        self.username = 'forkuser'
+        self.password = 'qweqwe'
+        u1 = fixture.create_user(self.username, password=self.password, email='fork_king@example.com')
         self.u1_id = u1.user_id
         Session().commit()
 
@@ -37,13 +36,13 @@
     def test_index(self):
         self.log_user()
         repo_name = self.REPO
-        response = self.app.get(url(controller='forks', action='forks',
+        response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
 
         response.mustcontain("""There are no forks yet""")
 
     def test_no_permissions_to_fork(self):
-        self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)['user_id']
+        self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)['user_id']
         try:
             user_model = UserModel()
             usr = User.get_default_user()
@@ -52,7 +51,7 @@
             Session().commit()
             # try create a fork
             repo_name = self.REPO
-            self.app.post(url(controller='forks', action='fork_create',
+            self.app.post(base.url(controller='forks', action='fork_create',
                               repo_name=repo_name), {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=403)
         finally:
             usr = User.get_default_user()
@@ -70,7 +69,7 @@
         org_repo = Repository.get_by_repo_name(repo_name)
         creation_args = {
             'repo_name': fork_name,
-            'repo_group': u'-1',
+            'repo_group': '-1',
             'fork_parent_id': org_repo.repo_id,
             'repo_type': self.REPO_TYPE,
             'description': description,
@@ -78,10 +77,10 @@
             'landing_rev': 'rev:tip',
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
 
-        self.app.post(url(controller='forks', action='fork_create',
+        self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
 
-        response = self.app.get(url(controller='forks', action='forks',
+        response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
 
         response.mustcontain(
@@ -89,12 +88,12 @@
         )
 
         # remove this fork
-        response = self.app.post(url('delete_repo', repo_name=fork_name),
+        response = self.app.post(base.url('delete_repo', repo_name=fork_name),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
     def test_fork_create_into_group(self):
         self.log_user()
-        group = fixture.create_repo_group(u'vc')
+        group = fixture.create_repo_group('vc')
         group_id = group.group_id
         fork_name = self.REPO_FORK
         fork_name_full = 'vc/%s' % fork_name
@@ -110,13 +109,13 @@
             'private': 'False',
             'landing_rev': 'rev:tip',
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        self.app.post(url(controller='forks', action='fork_create',
+        self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
         repo = Repository.get_by_repo_name(fork_name_full)
         assert repo.fork.repo_name == self.REPO
 
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=fork_name_full))
+        response = self.app.get(base.url('repo_check_home', repo_name=fork_name_full))
         # test if we have a message that fork is ok
         self.checkSessionFlash(response,
                 'Forked repository %s as <a href="/%s">%s</a>'
@@ -130,7 +129,7 @@
         assert fork_repo.fork.repo_name == repo_name
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=fork_name_full))
+        response = self.app.get(base.url('summary_home', repo_name=fork_name_full))
         response.mustcontain(fork_name_full)
         response.mustcontain(self.REPO_TYPE)
         response.mustcontain('Fork of "<a href="/%s">%s</a>"' % (repo_name, repo_name))
@@ -144,49 +143,49 @@
         # create a fork
         repo_name = self.REPO
         org_repo = Repository.get_by_repo_name(repo_name)
-        fork_name = safe_str(self.REPO_FORK + u'-rødgrød')
+        fork_name = self.REPO_FORK + '-rødgrød'
         creation_args = {
             'repo_name': fork_name,
-            'repo_group': u'-1',
+            'repo_group': '-1',
             'fork_parent_id': org_repo.repo_id,
             'repo_type': self.REPO_TYPE,
             'description': 'unicode repo 1',
             'private': 'False',
             'landing_rev': 'rev:tip',
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        self.app.post(url(controller='forks', action='fork_create',
+        self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
-        response = self.app.get(url(controller='forks', action='forks',
+        response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
         response.mustcontain(
-            """<a href="/%s">%s</a>""" % (urllib.quote(fork_name), fork_name)
+            """<a href="/%s">%s</a>""" % (urllib.parse.quote(fork_name), fork_name)
         )
-        fork_repo = Repository.get_by_repo_name(safe_unicode(fork_name))
+        fork_repo = Repository.get_by_repo_name(fork_name)
         assert fork_repo
 
         # fork the fork
-        fork_name_2 = safe_str(self.REPO_FORK + u'-blåbærgrød')
+        fork_name_2 = self.REPO_FORK + '-blåbærgrød'
         creation_args = {
             'repo_name': fork_name_2,
-            'repo_group': u'-1',
+            'repo_group': '-1',
             'fork_parent_id': fork_repo.repo_id,
             'repo_type': self.REPO_TYPE,
             'description': 'unicode repo 2',
             'private': 'False',
             'landing_rev': 'rev:tip',
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        self.app.post(url(controller='forks', action='fork_create',
+        self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=fork_name), creation_args)
-        response = self.app.get(url(controller='forks', action='forks',
+        response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=fork_name))
         response.mustcontain(
-            """<a href="/%s">%s</a>""" % (urllib.quote(fork_name_2), fork_name_2)
+            """<a href="/%s">%s</a>""" % (urllib.parse.quote(fork_name_2), fork_name_2)
         )
 
         # remove these forks
-        response = self.app.post(url('delete_repo', repo_name=fork_name_2),
+        response = self.app.post(base.url('delete_repo', repo_name=fork_name_2),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
-        response = self.app.post(url('delete_repo', repo_name=fork_name),
+        response = self.app.post(base.url('delete_repo', repo_name=fork_name),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
     def test_fork_create_and_permissions(self):
@@ -197,20 +196,20 @@
         org_repo = Repository.get_by_repo_name(repo_name)
         creation_args = {
             'repo_name': fork_name,
-            'repo_group': u'-1',
+            'repo_group': '-1',
             'fork_parent_id': org_repo.repo_id,
             'repo_type': self.REPO_TYPE,
             'description': description,
             'private': 'False',
             'landing_rev': 'rev:tip',
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
-        self.app.post(url(controller='forks', action='fork_create',
+        self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
         repo = Repository.get_by_repo_name(self.REPO_FORK)
         assert repo.fork.repo_name == self.REPO
 
         ## run the check page that triggers the flash message
-        response = self.app.get(url('repo_check_home', repo_name=fork_name))
+        response = self.app.get(base.url('repo_check_home', repo_name=fork_name))
         # test if we have a message that fork is ok
         self.checkSessionFlash(response,
                 'Forked repository %s as <a href="/%s">%s</a>'
@@ -224,7 +223,7 @@
         assert fork_repo.fork.repo_name == repo_name
 
         # test if the repository is visible in the list ?
-        response = self.app.get(url('summary_home', repo_name=fork_name))
+        response = self.app.get(base.url('summary_home', repo_name=fork_name))
         response.mustcontain(fork_name)
         response.mustcontain(self.REPO_TYPE)
         response.mustcontain('Fork of "<a href="/%s">%s</a>"' % (repo_name, repo_name))
@@ -242,7 +241,7 @@
                                           perm='repository.read')
         Session().commit()
 
-        response = self.app.get(url(controller='forks', action='forks',
+        response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
 
         response.mustcontain('<div>fork of vcs test</div>')
@@ -257,7 +256,7 @@
             Session().commit()
 
             # fork shouldn't be visible
-            response = self.app.get(url(controller='forks', action='forks',
+            response = self.app.get(base.url(controller='forks', action='forks',
                                         repo_name=repo_name))
             response.mustcontain('There are no forks yet')
 
@@ -270,14 +269,14 @@
 
 
 class TestGIT(_BaseTestCase):
-    REPO = GIT_REPO
-    NEW_REPO = NEW_GIT_REPO
+    REPO = base.GIT_REPO
+    NEW_REPO = base.NEW_GIT_REPO
     REPO_TYPE = 'git'
-    REPO_FORK = GIT_FORK
+    REPO_FORK = base.GIT_FORK
 
 
 class TestHG(_BaseTestCase):
-    REPO = HG_REPO
-    NEW_REPO = NEW_HG_REPO
+    REPO = base.HG_REPO
+    NEW_REPO = base.NEW_HG_REPO
     REPO_TYPE = 'hg'
-    REPO_FORK = HG_FORK
+    REPO_FORK = base.HG_FORK
--- a/kallithea/tests/functional/test_home.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_home.py	Sat May 02 21:20:43 2020 +0200
@@ -4,18 +4,18 @@
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestHomeController(TestController):
+class TestHomeController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='home', action='index'))
+        response = self.app.get(base.url(controller='home', action='index'))
         # if global permission is set
         response.mustcontain('Add Repository')
 
@@ -28,60 +28,60 @@
         )
 
         # html in javascript variable:
-        response.mustcontain(r'href=\"/%s\"' % HG_REPO)
+        response.mustcontain(r'href=\"/%s\"' % base.HG_REPO)
 
         response.mustcontain(r'\x3ci class=\"icon-globe\"')
 
         response.mustcontain(r'\"fixes issue with having custom format for git-log\n\"')
-        response.mustcontain(r'\"/%s/changeset/5f2c6ee195929b0be80749243c18121c9864a3b3\"' % GIT_REPO)
+        response.mustcontain(r'\"/%s/changeset/5f2c6ee195929b0be80749243c18121c9864a3b3\"' % base.GIT_REPO)
 
         response.mustcontain(r'\"disable security checks on hg clone for travis\"')
-        response.mustcontain(r'\"/%s/changeset/96507bd11ecc815ebc6270fdf6db110928c09c1e\"' % HG_REPO)
+        response.mustcontain(r'\"/%s/changeset/96507bd11ecc815ebc6270fdf6db110928c09c1e\"' % base.HG_REPO)
 
     def test_repo_summary_with_anonymous_access_disabled(self):
         with fixture.anon_access(False):
-            response = self.app.get(url(controller='summary',
-                                        action='index', repo_name=HG_REPO),
+            response = self.app.get(base.url(controller='summary',
+                                        action='index', repo_name=base.HG_REPO),
                                         status=302)
             assert 'login' in response.location
 
     def test_index_with_anonymous_access_disabled(self):
         with fixture.anon_access(False):
-            response = self.app.get(url(controller='home', action='index'),
+            response = self.app.get(base.url(controller='home', action='index'),
                                     status=302)
             assert 'login' in response.location
 
     def test_index_page_on_groups(self):
         self.log_user()
-        gr = fixture.create_repo_group(u'gr1')
-        fixture.create_repo(name=u'gr1/repo_in_group', repo_group=gr)
-        response = self.app.get(url('repos_group_home', group_name=u'gr1'))
+        gr = fixture.create_repo_group('gr1')
+        fixture.create_repo(name='gr1/repo_in_group', repo_group=gr)
+        response = self.app.get(base.url('repos_group_home', group_name='gr1'))
 
         try:
-            response.mustcontain(u"gr1/repo_in_group")
+            response.mustcontain("gr1/repo_in_group")
         finally:
-            RepoModel().delete(u'gr1/repo_in_group')
-            RepoGroupModel().delete(repo_group=u'gr1', force_delete=True)
+            RepoModel().delete('gr1/repo_in_group')
+            RepoGroupModel().delete(repo_group='gr1', force_delete=True)
             Session().commit()
 
     def test_users_and_groups_data(self):
-        fixture.create_user('evil', firstname=u'D\'o\'ct"o"r', lastname=u'Évíl')
-        fixture.create_user_group(u'grrrr', user_group_description=u"Groüp")
-        response = self.app.get(url('users_and_groups_data', query=u'evi'))
+        fixture.create_user('evil', firstname='D\'o\'ct"o"r', lastname='Évíl')
+        fixture.create_user_group('grrrr', user_group_description="Groüp")
+        response = self.app.get(base.url('users_and_groups_data', query='evi'))
         assert response.status_code == 302
-        assert url('login_home') in response.location
-        self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-        response = self.app.get(url('users_and_groups_data', query=u'evi'))
+        assert base.url('login_home') in response.location
+        self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
+        response = self.app.get(base.url('users_and_groups_data', query='evi'))
         result = json.loads(response.body)['results']
-        assert result[0].get('fname') == u'D\'o\'ct"o"r'
-        assert result[0].get('lname') == u'Évíl'
-        response = self.app.get(url('users_and_groups_data', key=u'evil'))
+        assert result[0].get('fname') == 'D\'o\'ct"o"r'
+        assert result[0].get('lname') == 'Évíl'
+        response = self.app.get(base.url('users_and_groups_data', key='evil'))
         result = json.loads(response.body)['results']
-        assert result[0].get('fname') == u'D\'o\'ct"o"r'
-        assert result[0].get('lname') == u'Évíl'
-        response = self.app.get(url('users_and_groups_data', query=u'rrrr'))
+        assert result[0].get('fname') == 'D\'o\'ct"o"r'
+        assert result[0].get('lname') == 'Évíl'
+        response = self.app.get(base.url('users_and_groups_data', query='rrrr'))
         result = json.loads(response.body)['results']
         assert not result
-        response = self.app.get(url('users_and_groups_data', types='users,groups', query=u'rrrr'))
+        response = self.app.get(base.url('users_and_groups_data', types='users,groups', query='rrrr'))
         result = json.loads(response.body)['results']
-        assert result[0].get('grname') == u'grrrr'
+        assert result[0].get('grname') == 'grrrr'
--- a/kallithea/tests/functional/test_journal.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_journal.py	Sat May 02 21:20:43 2020 +0200
@@ -1,13 +1,13 @@
 import datetime
 
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestJournalController(TestController):
+class TestJournalController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='journal', action='index'))
+        response = self.app.get(base.url(controller='journal', action='index'))
 
         response.mustcontain("""<h4>%s</h4>""" % datetime.date.today())
 
@@ -22,18 +22,18 @@
 #
 #        assert len(followings) == 1, 'Not following any repository'
 #
-#        response = self.app.post(url(controller='journal',
+#        response = self.app.post(base.url(controller='journal',
 #                                     action='toggle_following'),
 #                                     {'follows_repository_id':repo.repo_id})
 
     def test_start_following_repository(self):
         self.log_user()
-        response = self.app.get(url(controller='journal', action='index'),)
+        response = self.app.get(base.url(controller='journal', action='index'),)
 
     def test_public_journal_atom(self):
         self.log_user()
-        response = self.app.get(url(controller='journal', action='public_journal_atom'),)
+        response = self.app.get(base.url(controller='journal', action='public_journal_atom'),)
 
     def test_public_journal_rss(self):
         self.log_user()
-        response = self.app.get(url(controller='journal', action='public_journal_rss'),)
+        response = self.app.get(base.url(controller='journal', action='public_journal_rss'),)
--- a/kallithea/tests/functional/test_login.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_login.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import re
 import time
-import urlparse
+import urllib.parse
 
 import mock
 from tg.util.webtest import test_context
@@ -15,61 +15,61 @@
 from kallithea.model.db import User
 from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestLoginController(TestController):
+class TestLoginController(base.TestController):
 
     def test_index(self):
-        response = self.app.get(url(controller='login', action='index'))
+        response = self.app.get(base.url(controller='login', action='index'))
         assert response.status == '200 OK'
         # Test response...
 
     def test_login_admin_ok(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_ADMIN_LOGIN,
-                                  'password': TEST_USER_ADMIN_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_ADMIN_LOGIN,
+                                  'password': base.TEST_USER_ADMIN_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         assert response.status == '302 Found'
-        self.assert_authenticated_user(response, TEST_USER_ADMIN_LOGIN)
+        self.assert_authenticated_user(response, base.TEST_USER_ADMIN_LOGIN)
 
         response = response.follow()
-        response.mustcontain('/%s' % HG_REPO)
+        response.mustcontain('/%s' % base.HG_REPO)
 
     def test_login_regular_ok(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_LOGIN,
-                                  'password': TEST_USER_REGULAR_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_LOGIN,
+                                  'password': base.TEST_USER_REGULAR_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         assert response.status == '302 Found'
-        self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN)
+        self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN)
 
         response = response.follow()
-        response.mustcontain('/%s' % HG_REPO)
+        response.mustcontain('/%s' % base.HG_REPO)
 
     def test_login_regular_email_ok(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_EMAIL,
-                                  'password': TEST_USER_REGULAR_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_EMAIL,
+                                  'password': base.TEST_USER_REGULAR_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         assert response.status == '302 Found'
-        self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN)
+        self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN)
 
         response = response.follow()
-        response.mustcontain('/%s' % HG_REPO)
+        response.mustcontain('/%s' % base.HG_REPO)
 
     def test_login_ok_came_from(self):
         test_came_from = '/_admin/users'
-        response = self.app.post(url(controller='login', action='index',
+        response = self.app.post(base.url(controller='login', action='index',
                                      came_from=test_came_from),
-                                 {'username': TEST_USER_ADMIN_LOGIN,
-                                  'password': TEST_USER_ADMIN_PASS,
+                                 {'username': base.TEST_USER_ADMIN_LOGIN,
+                                  'password': base.TEST_USER_ADMIN_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         assert response.status == '302 Found'
         response = response.follow()
@@ -78,9 +78,9 @@
         response.mustcontain('Users Administration')
 
     def test_login_do_not_remember(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_LOGIN,
-                                  'password': TEST_USER_REGULAR_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_LOGIN,
+                                  'password': base.TEST_USER_REGULAR_PASS,
                                   'remember': False,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
@@ -89,9 +89,9 @@
             assert not re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r has expiration date, but should be a session cookie' % cookie
 
     def test_login_remember(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_LOGIN,
-                                  'password': TEST_USER_REGULAR_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_LOGIN,
+                                  'password': base.TEST_USER_REGULAR_PASS,
                                   'remember': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
@@ -100,23 +100,23 @@
             assert re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r should have expiration date, but is a session cookie' % cookie
 
     def test_logout(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_LOGIN,
-                                  'password': TEST_USER_REGULAR_PASS,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_LOGIN,
+                                  'password': base.TEST_USER_REGULAR_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         # Verify that a login session has been established.
-        response = self.app.get(url(controller='login', action='index'))
+        response = self.app.get(base.url(controller='login', action='index'))
         response = response.follow()
         assert 'authuser' in response.session
 
         response.click('Log Out')
 
         # Verify that the login session has been terminated.
-        response = self.app.get(url(controller='login', action='index'))
+        response = self.app.get(base.url(controller='login', action='index'))
         assert 'authuser' not in response.session
 
-    @parametrize('url_came_from', [
+    @base.parametrize('url_came_from', [
           ('data:text/html,<script>window.alert("xss")</script>',),
           ('mailto:test@example.com',),
           ('file:///etc/passwd',),
@@ -128,16 +128,16 @@
           ('non-absolute-path',),
     ])
     def test_login_bad_came_froms(self, url_came_from):
-        response = self.app.post(url(controller='login', action='index',
+        response = self.app.post(base.url(controller='login', action='index',
                                      came_from=url_came_from),
-                                 {'username': TEST_USER_ADMIN_LOGIN,
-                                  'password': TEST_USER_ADMIN_PASS,
+                                 {'username': base.TEST_USER_ADMIN_LOGIN,
+                                  'password': base.TEST_USER_ADMIN_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()},
                                  status=400)
 
     def test_login_short_password(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_ADMIN_LOGIN,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_ADMIN_LOGIN,
                                   'password': 'as',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         assert response.status == '200 OK'
@@ -145,7 +145,7 @@
         response.mustcontain('Enter 3 characters or more')
 
     def test_login_wrong_username_password(self):
-        response = self.app.post(url(controller='login', action='index'),
+        response = self.app.post(base.url(controller='login', action='index'),
                                  {'username': 'error',
                                   'password': 'test12',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -153,8 +153,8 @@
         response.mustcontain('Invalid username or password')
 
     def test_login_non_ascii(self):
-        response = self.app.post(url(controller='login', action='index'),
-                                 {'username': TEST_USER_REGULAR_LOGIN,
+        response = self.app.post(base.url(controller='login', action='index'),
+                                 {'username': base.TEST_USER_REGULAR_LOGIN,
                                   'password': 'blåbærgrød',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
@@ -162,63 +162,61 @@
 
     # verify that get arguments are correctly passed along login redirection
 
-    @parametrize('args,args_encoded', [
-        ({'foo':'one', 'bar':'two'}, (('foo', 'one'), ('bar', 'two'))),
-        ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'},
-             (('blue', u'blå'.encode('utf-8')), ('green', u'grøn'.encode('utf-8')))),
+    @base.parametrize('args', [
+        {'foo':'one', 'bar':'two'},
+        {'blue': 'blå', 'green': 'grøn'},
     ])
-    def test_redirection_to_login_form_preserves_get_args(self, args, args_encoded):
+    def test_redirection_to_login_form_preserves_get_args(self, args):
         with fixture.anon_access(False):
-            response = self.app.get(url(controller='summary', action='index',
-                                        repo_name=HG_REPO,
+            response = self.app.get(base.url(controller='summary', action='index',
+                                        repo_name=base.HG_REPO,
                                         **args))
             assert response.status == '302 Found'
-            came_from = urlparse.parse_qs(urlparse.urlparse(response.location).query)['came_from'][0]
-            came_from_qs = urlparse.parse_qsl(urlparse.urlparse(came_from).query)
-            for encoded in args_encoded:
-                assert encoded in came_from_qs
+            came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.location).query)['came_from'][0]
+            came_from_qs = urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query)
+            assert sorted(came_from_qs) == sorted(args.items())
 
-    @parametrize('args,args_encoded', [
+    @base.parametrize('args,args_encoded', [
         ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
-        ({'blue': u'blå', 'green':u'grøn'},
+        ({'blue': 'blå', 'green':'grøn'},
              ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
     ])
     def test_login_form_preserves_get_args(self, args, args_encoded):
-        response = self.app.get(url(controller='login', action='index',
-                                    came_from=url('/_admin/users', **args)))
-        came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0]
+        response = self.app.get(base.url(controller='login', action='index',
+                                    came_from=base.url('/_admin/users', **args)))
+        came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0]
         for encoded in args_encoded:
             assert encoded in came_from
 
-    @parametrize('args,args_encoded', [
+    @base.parametrize('args,args_encoded', [
         ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
-        ({'blue': u'blå', 'green':u'grøn'},
+        ({'blue': 'blå', 'green':'grøn'},
              ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
     ])
     def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded):
-        response = self.app.post(url(controller='login', action='index',
-                                     came_from=url('/_admin/users', **args)),
-                                 {'username': TEST_USER_ADMIN_LOGIN,
-                                  'password': TEST_USER_ADMIN_PASS,
+        response = self.app.post(base.url(controller='login', action='index',
+                                     came_from=base.url('/_admin/users', **args)),
+                                 {'username': base.TEST_USER_ADMIN_LOGIN,
+                                  'password': base.TEST_USER_ADMIN_PASS,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         assert response.status == '302 Found'
         for encoded in args_encoded:
             assert encoded in response.location
 
-    @parametrize('args,args_encoded', [
+    @base.parametrize('args,args_encoded', [
         ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
-        ({'blue': u'blå', 'green':u'grøn'},
+        ({'blue': 'blå', 'green':'grøn'},
              ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
     ])
     def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded):
-        response = self.app.post(url(controller='login', action='index',
-                                     came_from=url('/_admin/users', **args)),
+        response = self.app.post(base.url(controller='login', action='index',
+                                     came_from=base.url('/_admin/users', **args)),
                                  {'username': 'error',
                                   'password': 'test12',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.mustcontain('Invalid username or password')
-        came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0]
+        came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0]
         for encoded in args_encoded:
             assert encoded in came_from
 
@@ -226,12 +224,12 @@
     # REGISTRATIONS
     #==========================================================================
     def test_register(self):
-        response = self.app.get(url(controller='login', action='register'))
+        response = self.app.get(base.url(controller='login', action='register'))
         response.mustcontain('Sign Up')
 
     def test_register_err_same_username(self):
-        uname = TEST_USER_ADMIN_LOGIN
-        response = self.app.post(url(controller='login', action='register'),
+        uname = base.TEST_USER_ADMIN_LOGIN
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': uname,
                                              'password': 'test12',
                                              'password_confirmation': 'test12',
@@ -246,11 +244,11 @@
         response.mustcontain(msg)
 
     def test_register_err_same_email(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': 'test_admin_0',
                                              'password': 'test12',
                                              'password_confirmation': 'test12',
-                                             'email': TEST_USER_ADMIN_EMAIL,
+                                             'email': base.TEST_USER_ADMIN_EMAIL,
                                              'firstname': 'test',
                                              'lastname': 'test',
                                              '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -260,11 +258,11 @@
         response.mustcontain(msg)
 
     def test_register_err_same_email_case_sensitive(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': 'test_admin_1',
                                              'password': 'test12',
                                              'password_confirmation': 'test12',
-                                             'email': TEST_USER_ADMIN_EMAIL.title(),
+                                             'email': base.TEST_USER_ADMIN_EMAIL.title(),
                                              'firstname': 'test',
                                              'lastname': 'test',
                                              '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -273,7 +271,7 @@
         response.mustcontain(msg)
 
     def test_register_err_wrong_data(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': 'xs',
                                              'password': 'test',
                                              'password_confirmation': 'test',
@@ -286,7 +284,7 @@
         response.mustcontain('Enter a value 6 characters long or more')
 
     def test_register_err_username(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': 'error user',
                                              'password': 'test12',
                                              'password_confirmation': 'test12',
@@ -302,8 +300,8 @@
                 'alphanumeric character')
 
     def test_register_err_case_sensitive(self):
-        usr = TEST_USER_ADMIN_LOGIN.title()
-        response = self.app.post(url(controller='login', action='register'),
+        usr = base.TEST_USER_ADMIN_LOGIN.title()
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': usr,
                                              'password': 'test12',
                                              'password_confirmation': 'test12',
@@ -319,7 +317,7 @@
         response.mustcontain(msg)
 
     def test_register_special_chars(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                         {'username': 'xxxaxn',
                                          'password': 'ąćźżąśśśś',
                                          'password_confirmation': 'ąćźżąśśśś',
@@ -333,7 +331,7 @@
         response.mustcontain(msg)
 
     def test_register_password_mismatch(self):
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': 'xs',
                                              'password': '123qwe',
                                              'password_confirmation': 'qwe123',
@@ -352,7 +350,7 @@
         name = 'testname'
         lastname = 'testlastname'
 
-        response = self.app.post(url(controller='login', action='register'),
+        response = self.app.post(base.url(controller='login', action='register'),
                                             {'username': username,
                                              'password': password,
                                              'password_confirmation': password,
@@ -380,22 +378,22 @@
     def test_forgot_password_wrong_mail(self):
         bad_email = 'username%wrongmail.org'
         response = self.app.post(
-                        url(controller='login', action='password_reset'),
+                        base.url(controller='login', action='password_reset'),
                             {'email': bad_email,
                              '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.mustcontain('An email address must contain a single @')
 
     def test_forgot_password(self):
-        response = self.app.get(url(controller='login',
+        response = self.app.get(base.url(controller='login',
                                     action='password_reset'))
         assert response.status == '200 OK'
 
         username = 'test_password_reset_1'
         password = 'qweqwe'
         email = 'username@example.com'
-        name = u'passwd'
-        lastname = u'reset'
+        name = 'passwd'
+        lastname = 'reset'
         timestamp = int(time.time())
 
         new = User()
@@ -412,12 +410,12 @@
             User.get_by_username(username), timestamp, self.session_csrf_secret_token())
 
         collected = []
-        def mock_send_email(recipients, subject, body='', html_body='', headers=None, author=None):
+        def mock_send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
             collected.append((recipients, subject, body, html_body))
 
         with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', mock_send_email), \
                 mock.patch.object(time, 'time', lambda: timestamp):
-            response = self.app.post(url(controller='login',
+            response = self.app.post(base.url(controller='login',
                                          action='password_reset'),
                                      {'email': email,
                                       '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -431,7 +429,7 @@
         (confirmation_url,) = (line for line in body.splitlines() if line.startswith('http://'))
         assert ' href="%s"' % confirmation_url.replace('&', '&amp;').replace('@', '%40') in html_body
 
-        d = urlparse.parse_qs(urlparse.urlparse(confirmation_url).query)
+        d = urllib.parse.parse_qs(urllib.parse.urlparse(confirmation_url).query)
         assert d['token'] == [token]
         assert d['timestamp'] == [str(timestamp)]
         assert d['email'] == [email]
@@ -442,7 +440,7 @@
 
         bad_token = "bad"
 
-        response = self.app.post(url(controller='login',
+        response = self.app.post(base.url(controller='login',
                                      action='password_reset_confirmation'),
                                  {'email': email,
                                   'timestamp': timestamp,
@@ -459,14 +457,14 @@
         response = self.app.get(confirmation_url)
         assert response.status == '200 OK'
         response.mustcontain("You are about to set a new password for the email address %s" % email)
-        response.mustcontain('<form action="%s" method="post">' % url(controller='login', action='password_reset_confirmation'))
+        response.mustcontain('<form action="%s" method="post">' % base.url(controller='login', action='password_reset_confirmation'))
         response.mustcontain('value="%s"' % self.session_csrf_secret_token())
         response.mustcontain('value="%s"' % token)
         response.mustcontain('value="%s"' % timestamp)
         response.mustcontain('value="username@example.com"')
 
         # fake a submit of that form
-        response = self.app.post(url(controller='login',
+        response = self.app.post(base.url(controller='login',
                                      action='password_reset_confirmation'),
                                  {'email': email,
                                   'timestamp': timestamp,
@@ -502,16 +500,16 @@
                 params = {'api_key': api_key}
                 headers = {'Authorization': 'Bearer ' + str(api_key)}
 
-            self.app.get(url(controller='changeset', action='changeset_raw',
-                             repo_name=HG_REPO, revision='tip', **params),
+            self.app.get(base.url(controller='changeset', action='changeset_raw',
+                             repo_name=base.HG_REPO, revision='tip', **params),
                          status=status)
 
-            self.app.get(url(controller='changeset', action='changeset_raw',
-                             repo_name=HG_REPO, revision='tip'),
+            self.app.get(base.url(controller='changeset', action='changeset_raw',
+                             repo_name=base.HG_REPO, revision='tip'),
                          headers=headers,
                          status=status)
 
-    @parametrize('test_name,api_key,code', [
+    @base.parametrize('test_name,api_key,code', [
         ('none', None, 302),
         ('empty_string', '', 403),
         ('fake_number', '123456', 403),
@@ -523,12 +521,12 @@
         self._api_key_test(api_key, code)
 
     def test_access_page_via_extra_api_key(self):
-        new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test')
+        new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test')
         Session().commit()
         self._api_key_test(new_api_key.api_key, status=200)
 
     def test_access_page_via_expired_api_key(self):
-        new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test')
+        new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test')
         Session().commit()
         # patch the API key and make it expired
         new_api_key.expires = 0
--- a/kallithea/tests/functional/test_my_account.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_my_account.py	Sat May 02 21:20:43 2020 +0200
@@ -6,14 +6,14 @@
 from kallithea.model.db import Repository, User, UserApiKeys, UserFollowing, UserSshKeys
 from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestMyAccountController(TestController):
+class TestMyAccountController(base.TestController):
     test_user_1 = 'testme'
 
     @classmethod
@@ -24,74 +24,74 @@
 
     def test_my_account(self):
         self.log_user()
-        response = self.app.get(url('my_account'))
+        response = self.app.get(base.url('my_account'))
 
-        response.mustcontain('value="%s' % TEST_USER_ADMIN_LOGIN)
+        response.mustcontain('value="%s' % base.TEST_USER_ADMIN_LOGIN)
 
     def test_my_account_my_repos(self):
         self.log_user()
-        response = self.app.get(url('my_account_repos'))
+        response = self.app.get(base.url('my_account_repos'))
         cnt = Repository.query().filter(Repository.owner ==
-                           User.get_by_username(TEST_USER_ADMIN_LOGIN)).count()
-        response.mustcontain('"raw_name": "%s"' % HG_REPO)
-        response.mustcontain('"just_name": "%s"' % GIT_REPO)
+                           User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
+        response.mustcontain('"raw_name": "%s"' % base.HG_REPO)
+        response.mustcontain('"just_name": "%s"' % base.GIT_REPO)
 
     def test_my_account_my_watched(self):
         self.log_user()
-        response = self.app.get(url('my_account_watched'))
+        response = self.app.get(base.url('my_account_watched'))
 
         cnt = UserFollowing.query().filter(UserFollowing.user ==
-                            User.get_by_username(TEST_USER_ADMIN_LOGIN)).count()
-        response.mustcontain('"raw_name": "%s"' % HG_REPO)
-        response.mustcontain('"just_name": "%s"' % GIT_REPO)
+                            User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
+        response.mustcontain('"raw_name": "%s"' % base.HG_REPO)
+        response.mustcontain('"just_name": "%s"' % base.GIT_REPO)
 
     def test_my_account_my_emails(self):
         self.log_user()
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
         response.mustcontain('No additional emails specified')
 
     def test_my_account_my_emails_add_existing_email(self):
         self.log_user()
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
         response.mustcontain('No additional emails specified')
-        response = self.app.post(url('my_account_emails'),
-                                 {'new_email': TEST_USER_REGULAR_EMAIL, '_session_csrf_secret_token': self.session_csrf_secret_token()})
+        response = self.app.post(base.url('my_account_emails'),
+                                 {'new_email': base.TEST_USER_REGULAR_EMAIL, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'This email address is already in use')
 
     def test_my_account_my_emails_add_missing_email_in_form(self):
         self.log_user()
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
         response.mustcontain('No additional emails specified')
-        response = self.app.post(url('my_account_emails'),
+        response = self.app.post(base.url('my_account_emails'),
             {'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Please enter an email address')
 
     def test_my_account_my_emails_add_remove(self):
         self.log_user()
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
         response.mustcontain('No additional emails specified')
 
-        response = self.app.post(url('my_account_emails'),
+        response = self.app.post(base.url('my_account_emails'),
                                  {'new_email': 'barz@example.com', '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
 
         from kallithea.model.db import UserEmailMap
         email_id = UserEmailMap.query() \
-            .filter(UserEmailMap.user == User.get_by_username(TEST_USER_ADMIN_LOGIN)) \
+            .filter(UserEmailMap.user == User.get_by_username(base.TEST_USER_ADMIN_LOGIN)) \
             .filter(UserEmailMap.email == 'barz@example.com').one().email_id
 
         response.mustcontain('barz@example.com')
         response.mustcontain('<input id="del_email_id" name="del_email_id" type="hidden" value="%s" />' % email_id)
 
-        response = self.app.post(url('my_account_emails_delete'),
+        response = self.app.post(base.url('my_account_emails_delete'),
                                  {'del_email_id': email_id, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Removed email from user')
-        response = self.app.get(url('my_account_emails'))
+        response = self.app.get(base.url('my_account_emails'))
         response.mustcontain('No additional emails specified')
 
 
-    @parametrize('name,attrs',
+    @base.parametrize('name,attrs',
         [('firstname', {'firstname': 'new_username'}),
          ('lastname', {'lastname': 'new_username'}),
          ('admin', {'admin': True}),
@@ -123,7 +123,7 @@
         params.update({'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         params.update(attrs)
-        response = self.app.post(url('my_account'), params)
+        response = self.app.post(base.url('my_account'), params)
 
         self.checkSessionFlash(response,
                                'Your account was updated successfully')
@@ -155,14 +155,14 @@
     def test_my_account_update_err_email_exists(self):
         self.log_user()
 
-        new_email = TEST_USER_REGULAR_EMAIL  # already existing email
-        response = self.app.post(url('my_account'),
+        new_email = base.TEST_USER_REGULAR_EMAIL  # already existing email
+        response = self.app.post(base.url('my_account'),
                                 params=dict(
-                                    username=TEST_USER_ADMIN_LOGIN,
-                                    new_password=TEST_USER_ADMIN_PASS,
+                                    username=base.TEST_USER_ADMIN_LOGIN,
+                                    new_password=base.TEST_USER_ADMIN_PASS,
                                     password_confirmation='test122',
-                                    firstname=u'NewName',
-                                    lastname=u'NewLastname',
+                                    firstname='NewName',
+                                    lastname='NewLastname',
                                     email=new_email,
                                     _session_csrf_secret_token=self.session_csrf_secret_token())
                                 )
@@ -170,16 +170,16 @@
         response.mustcontain('This email address is already in use')
 
     def test_my_account_update_err(self):
-        self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
+        self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
 
         new_email = 'newmail.pl'
-        response = self.app.post(url('my_account'),
+        response = self.app.post(base.url('my_account'),
                                  params=dict(
-                                            username=TEST_USER_ADMIN_LOGIN,
-                                            new_password=TEST_USER_ADMIN_PASS,
+                                            username=base.TEST_USER_ADMIN_LOGIN,
+                                            new_password=base.TEST_USER_ADMIN_PASS,
                                             password_confirmation='test122',
-                                            firstname=u'NewName',
-                                            lastname=u'NewLastname',
+                                            firstname='NewName',
+                                            lastname='NewLastname',
                                             email=new_email,
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
 
@@ -188,25 +188,25 @@
         with test_context(self.app):
             msg = validators.ValidUsername(edit=False, old_data={}) \
                     ._messages['username_exists']
-        msg = h.html_escape(msg % {'username': TEST_USER_ADMIN_LOGIN})
+        msg = h.html_escape(msg % {'username': base.TEST_USER_ADMIN_LOGIN})
         response.mustcontain(msg)
 
     def test_my_account_api_keys(self):
-        usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
         user = User.get(usr['user_id'])
-        response = self.app.get(url('my_account_api_keys'))
+        response = self.app.get(base.url('my_account_api_keys'))
         response.mustcontain(user.api_key)
         response.mustcontain('Expires: Never')
 
-    @parametrize('desc,lifetime', [
+    @base.parametrize('desc,lifetime', [
         ('forever', -1),
         ('5mins', 60*5),
         ('30days', 60*60*24*30),
     ])
     def test_my_account_add_api_keys(self, desc, lifetime):
-        usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
         user = User.get(usr['user_id'])
-        response = self.app.post(url('my_account_api_keys'),
+        response = self.app.post(base.url('my_account_api_keys'),
                                  {'description': desc, 'lifetime': lifetime, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         try:
@@ -220,9 +220,9 @@
                 Session().commit()
 
     def test_my_account_remove_api_key(self):
-        usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
         user = User.get(usr['user_id'])
-        response = self.app.post(url('my_account_api_keys'),
+        response = self.app.post(base.url('my_account_api_keys'),
                                  {'description': 'desc', 'lifetime': -1, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         response = response.follow()
@@ -231,33 +231,33 @@
         keys = UserApiKeys.query().all()
         assert 1 == len(keys)
 
-        response = self.app.post(url('my_account_api_keys_delete'),
+        response = self.app.post(base.url('my_account_api_keys_delete'),
                  {'del_api_key': keys[0].api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully deleted')
         keys = UserApiKeys.query().all()
         assert 0 == len(keys)
 
     def test_my_account_reset_main_api_key(self):
-        usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
+        usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
         user = User.get(usr['user_id'])
         api_key = user.api_key
-        response = self.app.get(url('my_account_api_keys'))
+        response = self.app.get(base.url('my_account_api_keys'))
         response.mustcontain(api_key)
         response.mustcontain('Expires: Never')
 
-        response = self.app.post(url('my_account_api_keys_delete'),
+        response = self.app.post(base.url('my_account_api_keys_delete'),
                  {'del_api_key_builtin': api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully reset')
         response = response.follow()
         response.mustcontain(no=[api_key])
 
     def test_my_account_add_ssh_key(self):
-        description = u'something'
-        public_key = u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
-        fingerprint = u'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
+        description = 'something'
+        public_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
+        fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
-        self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
-        response = self.app.post(url('my_account_ssh_keys'),
+        self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
+        response = self.app.post(base.url('my_account_ssh_keys'),
                                  {'description': description,
                                   'public_key': public_key,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -273,12 +273,12 @@
         Session().commit()
 
     def test_my_account_remove_ssh_key(self):
-        description = u''
-        public_key = u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
-        fingerprint = u'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
+        description = ''
+        public_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== me@localhost'
+        fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
-        self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS)
-        response = self.app.post(url('my_account_ssh_keys'),
+        self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
+        response = self.app.post(base.url('my_account_ssh_keys'),
                                  {'description': description,
                                   'public_key': public_key,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -286,9 +286,9 @@
         response.follow()
         user_id = response.session['authuser']['user_id']
         ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
-        assert ssh_key.description == u'me@localhost'
+        assert ssh_key.description == 'me@localhost'
 
-        response = self.app.post(url('my_account_ssh_keys_delete'),
+        response = self.app.post(base.url('my_account_ssh_keys_delete'),
                                  {'del_public_key_fingerprint': ssh_key.fingerprint,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key successfully deleted')
--- a/kallithea/tests/functional/test_pullrequests.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_pullrequests.py	Sat May 02 21:20:43 2020 +0200
@@ -5,27 +5,27 @@
 from kallithea.controllers.pullrequests import PullrequestsController
 from kallithea.model.db import PullRequest, User
 from kallithea.model.meta import Session
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestPullrequestsController(TestController):
+class TestPullrequestsController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='pullrequests', action='index',
-                                    repo_name=HG_REPO))
+        response = self.app.get(base.url(controller='pullrequests', action='index',
+                                    repo_name=base.HG_REPO))
 
     def test_create_trivial(self):
         self.log_user()
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
-                                 {'org_repo': HG_REPO,
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
+                                 {'org_repo': base.HG_REPO,
                                   'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
@@ -40,11 +40,11 @@
 
     def test_available(self):
         self.log_user()
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
-                                 {'org_repo': HG_REPO,
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
+                                 {'org_repo': base.HG_REPO,
                                   'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
@@ -60,11 +60,11 @@
 
     def test_range(self):
         self.log_user()
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
-                                 {'org_repo': HG_REPO,
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
+                                 {'org_repo': base.HG_REPO,
                                   'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
@@ -78,57 +78,57 @@
 
     def test_update_reviewers(self):
         self.log_user()
-        regular_user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
-        regular_user2 = User.get_by_username(TEST_USER_REGULAR2_LOGIN)
-        admin_user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        regular_user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        regular_user2 = User.get_by_username(base.TEST_USER_REGULAR2_LOGIN)
+        admin_user = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
         # create initial PR
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
-                                 {'org_repo': HG_REPO,
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
+                                 {'org_repo': base.HG_REPO,
                                   'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                  },
                                  status=302)
-        pull_request1_id = re.search('/pull-request/(\d+)/', response.location).group(1)
-        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request1_id)
+        pull_request1_id = re.search(r'/pull-request/(\d+)/', response.location).group(1)
+        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (base.HG_REPO, pull_request1_id)
 
         # create new iteration
-        response = self.app.post(url(controller='pullrequests', action='post',
-                                     repo_name=HG_REPO, pull_request_id=pull_request1_id),
+        response = self.app.post(base.url(controller='pullrequests', action='post',
+                                     repo_name=base.HG_REPO, pull_request_id=pull_request1_id),
                                  {
                                   'updaterev': '4f7e2131323e0749a740c0a56ab68ae9269c562a',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
-                                  'owner': TEST_USER_ADMIN_LOGIN,
+                                  'owner': base.TEST_USER_ADMIN_LOGIN,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                   'review_members': [regular_user.user_id],
                                  },
                                  status=302)
-        pull_request2_id = re.search('/pull-request/(\d+)/', response.location).group(1)
+        pull_request2_id = re.search(r'/pull-request/(\d+)/', response.location).group(1)
         assert pull_request2_id != pull_request1_id
-        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request2_id)
+        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (base.HG_REPO, pull_request2_id)
         response = response.follow()
         # verify reviewer was added
         response.mustcontain('<input type="hidden" value="%s" name="review_members" />' % regular_user.user_id)
 
         # update without creating new iteration
-        response = self.app.post(url(controller='pullrequests', action='post',
-                                     repo_name=HG_REPO, pull_request_id=pull_request2_id),
+        response = self.app.post(base.url(controller='pullrequests', action='post',
+                                     repo_name=base.HG_REPO, pull_request_id=pull_request2_id),
                                  {
                                   'pullrequest_title': 'Title',
                                   'pullrequest_desc': 'description',
-                                  'owner': TEST_USER_ADMIN_LOGIN,
+                                  'owner': base.TEST_USER_ADMIN_LOGIN,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                   'org_review_members': [admin_user.user_id], # fake - just to get some 'meanwhile' warning ... but it is also added ...
                                   'review_members': [regular_user2.user_id, admin_user.user_id],
                                  },
                                  status=302)
-        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request2_id)
+        assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (base.HG_REPO, pull_request2_id)
         response = response.follow()
         # verify reviewers were added / removed
         response.mustcontain('Meanwhile, the following reviewers have been added: test_regular')
@@ -141,12 +141,12 @@
         invalid_user_id = 99999
         self.log_user()
         # create a valid pull request
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
                                  {
-                                  'org_repo': HG_REPO,
+                                  'org_repo': base.HG_REPO,
                                   'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
@@ -155,34 +155,34 @@
                                 status=302)
         # location is of the form:
         # http://localhost/vcs_test_hg/pull-request/54/_/title
-        m = re.search('/pull-request/(\d+)/', response.location)
+        m = re.search(r'/pull-request/(\d+)/', response.location)
         assert m is not None
         pull_request_id = m.group(1)
 
         # update it
-        response = self.app.post(url(controller='pullrequests', action='post',
-                                     repo_name=HG_REPO, pull_request_id=pull_request_id),
+        response = self.app.post(base.url(controller='pullrequests', action='post',
+                                     repo_name=base.HG_REPO, pull_request_id=pull_request_id),
                                  {
                                   'updaterev': '4f7e2131323e0749a740c0a56ab68ae9269c562a',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
-                                  'owner': TEST_USER_ADMIN_LOGIN,
+                                  'owner': base.TEST_USER_ADMIN_LOGIN,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                   'review_members': [str(invalid_user_id)],
                                  },
                                  status=400)
-        response.mustcontain('Invalid reviewer &#34;%s&#34; specified' % invalid_user_id)
+        response.mustcontain('Invalid reviewer &quot;%s&quot; specified' % invalid_user_id)
 
     def test_edit_with_invalid_reviewer(self):
         invalid_user_id = 99999
         self.log_user()
         # create a valid pull request
-        response = self.app.post(url(controller='pullrequests', action='create',
-                                     repo_name=HG_REPO),
+        response = self.app.post(base.url(controller='pullrequests', action='create',
+                                     repo_name=base.HG_REPO),
                                  {
-                                  'org_repo': HG_REPO,
+                                  'org_repo': base.HG_REPO,
                                   'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a',
-                                  'other_repo': HG_REPO,
+                                  'other_repo': base.HG_REPO,
                                   'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e',
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
@@ -191,22 +191,22 @@
                                 status=302)
         # location is of the form:
         # http://localhost/vcs_test_hg/pull-request/54/_/title
-        m = re.search('/pull-request/(\d+)/', response.location)
+        m = re.search(r'/pull-request/(\d+)/', response.location)
         assert m is not None
         pull_request_id = m.group(1)
 
         # edit it
-        response = self.app.post(url(controller='pullrequests', action='post',
-                                     repo_name=HG_REPO, pull_request_id=pull_request_id),
+        response = self.app.post(base.url(controller='pullrequests', action='post',
+                                     repo_name=base.HG_REPO, pull_request_id=pull_request_id),
                                  {
                                   'pullrequest_title': 'title',
                                   'pullrequest_desc': 'description',
-                                  'owner': TEST_USER_ADMIN_LOGIN,
+                                  'owner': base.TEST_USER_ADMIN_LOGIN,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token(),
                                   'review_members': [str(invalid_user_id)],
                                  },
                                  status=400)
-        response.mustcontain('Invalid reviewer &#34;%s&#34; specified' % invalid_user_id)
+        response.mustcontain('Invalid reviewer &quot;%s&quot; specified' % invalid_user_id)
 
     def test_iteration_refs(self):
         # Repo graph excerpt:
@@ -226,18 +226,18 @@
 
         # create initial PR
         response = self.app.post(
-            url(controller='pullrequests', action='create', repo_name=HG_REPO),
+            base.url(controller='pullrequests', action='create', repo_name=base.HG_REPO),
             {
-                'org_repo': HG_REPO,
+                'org_repo': base.HG_REPO,
                 'org_ref': 'rev:9e6119747791:9e6119747791ff886a5abe1193a730b6bf874e1c',
-                'other_repo': HG_REPO,
+                'other_repo': base.HG_REPO,
                 'other_ref': 'branch:default:3d1091ee5a533b1f4577ec7d8a226bb315fb1336',
                 'pullrequest_title': 'title',
                 'pullrequest_desc': 'description',
                 '_session_csrf_secret_token': self.session_csrf_secret_token(),
             },
             status=302)
-        pr1_id = int(re.search('/pull-request/(\d+)/', response.location).group(1))
+        pr1_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
         pr1 = PullRequest.get(pr1_id)
 
         assert pr1.org_ref == 'branch:webvcs:9e6119747791ff886a5abe1193a730b6bf874e1c'
@@ -247,16 +247,16 @@
 
         # create PR 2 (new iteration with same ancestor)
         response = self.app.post(
-            url(controller='pullrequests', action='post', repo_name=HG_REPO, pull_request_id=pr1_id),
+            base.url(controller='pullrequests', action='post', repo_name=base.HG_REPO, pull_request_id=pr1_id),
             {
                 'updaterev': '5ec21f21aafe95220f1fc4843a4a57c378498b71',
                 'pullrequest_title': 'title',
                 'pullrequest_desc': 'description',
-                'owner': TEST_USER_REGULAR_LOGIN,
+                'owner': base.TEST_USER_REGULAR_LOGIN,
                 '_session_csrf_secret_token': self.session_csrf_secret_token(),
              },
              status=302)
-        pr2_id = int(re.search('/pull-request/(\d+)/', response.location).group(1))
+        pr2_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
         pr1 = PullRequest.get(pr1_id)
         pr2 = PullRequest.get(pr2_id)
 
@@ -269,16 +269,16 @@
 
         # create PR 3 (new iteration with new ancestor)
         response = self.app.post(
-            url(controller='pullrequests', action='post', repo_name=HG_REPO, pull_request_id=pr2_id),
+            base.url(controller='pullrequests', action='post', repo_name=base.HG_REPO, pull_request_id=pr2_id),
             {
                 'updaterev': 'fb95b340e0d03fa51f33c56c991c08077c99303e',
                 'pullrequest_title': 'title',
                 'pullrequest_desc': 'description',
-                'owner': TEST_USER_REGULAR_LOGIN,
+                'owner': base.TEST_USER_REGULAR_LOGIN,
                 '_session_csrf_secret_token': self.session_csrf_secret_token(),
              },
              status=302)
-        pr3_id = int(re.search('/pull-request/(\d+)/', response.location).group(1))
+        pr3_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
         pr2 = PullRequest.get(pr2_id)
         pr3 = PullRequest.get(pr3_id)
 
@@ -289,17 +289,17 @@
 
 
 @pytest.mark.usefixtures("test_context_fixture") # apply fixture for all test methods
-class TestPullrequestsGetRepoRefs(TestController):
+class TestPullrequestsGetRepoRefs(base.TestController):
 
     def setup_method(self, method):
-        self.repo_name = u'main'
+        self.repo_name = 'main'
         repo = fixture.create_repo(self.repo_name, repo_type='hg')
         self.repo_scm_instance = repo.scm_instance
         Session().commit()
         self.c = PullrequestsController()
 
     def teardown_method(self, method):
-        fixture.destroy_repo(u'main')
+        fixture.destroy_repo('main')
         Session().commit()
         Session.remove()
 
--- a/kallithea/tests/functional/test_repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -1,16 +1,16 @@
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestRepoGroupsController(TestController):
+class TestRepoGroupsController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url('repos_groups'))
+        response = self.app.get(base.url('repos_groups'))
         response.mustcontain('"records": []')
 
     def test_new(self):
         self.log_user()
-        response = self.app.get(url('new_repos_group'))
+        response = self.app.get(base.url('new_repos_group'))
 
     def test_create(self):
         self.log_user()
@@ -18,14 +18,14 @@
         group_name = 'foo'
 
         # creation with form error
-        response = self.app.post(url('repos_groups'),
+        response = self.app.post(base.url('repos_groups'),
                                          {'group_name': group_name,
                                           '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.mustcontain('name="group_name" type="text" value="%s"' % group_name)
         response.mustcontain('<!-- for: group_description -->')
 
         # creation
-        response = self.app.post(url('repos_groups'),
+        response = self.app.post(base.url('repos_groups'),
                                          {'group_name': group_name,
                                          'group_description': 'lala',
                                          'parent_group_id': '-1',
@@ -34,18 +34,18 @@
         self.checkSessionFlash(response, 'Created repository group %s' % group_name)
 
         # edit form
-        response = self.app.get(url('edit_repo_group', group_name=group_name))
+        response = self.app.get(base.url('edit_repo_group', group_name=group_name))
         response.mustcontain('>lala<')
 
         # edit with form error
-        response = self.app.post(url('update_repos_group', group_name=group_name),
+        response = self.app.post(base.url('update_repos_group', group_name=group_name),
                                          {'group_name': group_name,
                                           '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.mustcontain('name="group_name" type="text" value="%s"' % group_name)
         response.mustcontain('<!-- for: group_description -->')
 
         # edit
-        response = self.app.post(url('update_repos_group', group_name=group_name),
+        response = self.app.post(base.url('update_repos_group', group_name=group_name),
                                          {'group_name': group_name,
                                          'group_description': 'lolo',
                                           '_session_csrf_secret_token': self.session_csrf_secret_token()})
@@ -56,22 +56,22 @@
         response.mustcontain('>lolo<')
 
         # listing
-        response = self.app.get(url('repos_groups'))
+        response = self.app.get(base.url('repos_groups'))
         response.mustcontain('raw_name": "%s"' % group_name)
 
         # show
-        response = self.app.get(url('repos_group', group_name=group_name))
+        response = self.app.get(base.url('repos_group', group_name=group_name))
         response.mustcontain('href="/_admin/repo_groups/%s/edit"' % group_name)
 
         # show ignores extra trailing slashes in the URL
-        response = self.app.get(url('repos_group', group_name='%s//' % group_name))
+        response = self.app.get(base.url('repos_group', group_name='%s//' % group_name))
         response.mustcontain('href="/_admin/repo_groups/%s/edit"' % group_name)
 
         # delete
-        response = self.app.post(url('delete_repo_group', group_name=group_name),
+        response = self.app.post(base.url('delete_repo_group', group_name=group_name),
                                  {'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'Removed repository group %s' % group_name)
 
     def test_new_by_regular_user(self):
-        self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-        response = self.app.get(url('new_repos_group'), status=403)
+        self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
+        response = self.app.get(base.url('new_repos_group'), status=403)
--- a/kallithea/tests/functional/test_search.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_search.py	Sat May 02 21:20:43 2020 +0200
@@ -1,13 +1,13 @@
 import mock
 
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestSearchController(TestController):
+class TestSearchController(base.TestController):
 
     def test_index(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'))
+        response = self.app.get(base.url(controller='search', action='index'))
 
         response.mustcontain('class="form-control" id="q" name="q" type="text"')
         # Test response...
@@ -20,33 +20,33 @@
             'index_dir': str(tmpdir),
         }
         with mock.patch('kallithea.controllers.search.config', config_mock):
-            response = self.app.get(url(controller='search', action='index'),
-                                    {'q': HG_REPO})
+            response = self.app.get(base.url(controller='search', action='index'),
+                                    {'q': base.HG_REPO})
             response.mustcontain('The server has no search index.')
 
     def test_normal_search(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': 'def repo'})
         response.mustcontain('58 results')
 
     def test_repo_search(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
-                                {'q': 'repository:%s def test' % HG_REPO})
+        response = self.app.get(base.url(controller='search', action='index'),
+                                {'q': 'repository:%s def test' % base.HG_REPO})
 
         response.mustcontain('18 results')
 
     def test_search_last(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': 'last:t', 'type': 'commit'})
 
         response.mustcontain('2 results')
 
     def test_search_commit_message(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                     {'q': 'bother to ask where to fetch repo during tests',
                      'type': 'commit'})
 
@@ -56,8 +56,8 @@
 
     def test_search_commit_message_hg_repo(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index',
-                                    repo_name=HG_REPO),
+        response = self.app.get(base.url(controller='search', action='index',
+                                    repo_name=base.HG_REPO),
                     {'q': 'bother to ask where to fetch repo during tests',
                      'type': 'commit'})
 
@@ -66,7 +66,7 @@
 
     def test_search_commit_changed_file(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': 'changed:tests/utils.py',
                                  'type': 'commit'})
 
@@ -74,7 +74,7 @@
 
     def test_search_commit_changed_files_get_commit(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': 'changed:vcs/utils/archivers.py',
                                  'type': 'commit'})
 
@@ -90,7 +90,7 @@
 
     def test_search_commit_added_file(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': 'added:README.rst',
                                  'type': 'commit'})
 
@@ -102,7 +102,7 @@
 
     def test_search_author(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                     {'q': 'author:marcin@python-blog.com raw_id:b986218ba1c9b0d6a259fac9b050b1724ed8e545',
                      'type': 'commit'})
 
@@ -110,7 +110,7 @@
 
     def test_search_file_name(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                     {'q': 'README.rst', 'type': 'path'})
 
         response.mustcontain('2 results')
--- a/kallithea/tests/functional/test_search_indexing.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_search_indexing.py	Sat May 02 21:20:43 2020 +0200
@@ -5,7 +5,7 @@
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture, create_test_index
 
 
@@ -39,12 +39,12 @@
 
 repos = [
     # reponame,              init func or fork base, groupname
-    (u'indexing_test',       init_indexing_test,     None),
-    (u'indexing_test-fork',  u'indexing_test',       None),
-    (u'group/indexing_test', u'indexing_test',       u'group'),
-    (u'this-is-it',          u'indexing_test',       None),
-    (u'indexing_test-foo',   u'indexing_test',       None),
-    (u'stopword_test',       init_stopword_test,     None),
+    ('indexing_test',       init_indexing_test,     None),
+    ('indexing_test-fork',  'indexing_test',       None),
+    ('group/indexing_test', 'indexing_test',       'group'),
+    ('this-is-it',          'indexing_test',       None),
+    ('indexing_test-foo',   'indexing_test',       None),
+    ('stopword_test',       init_stopword_test,     None),
 ]
 
 
@@ -66,10 +66,10 @@
         # (FYI, ENOMEM occurs at forking "git" with python 2.7.3,
         # Linux 3.2.78-1 x86_64, 3GB memory, and no ulimit
         # configuration for memory)
-        create_test_index(TESTS_TMP_PATH, CONFIG, full_index=full_index)
+        create_test_index(base.TESTS_TMP_PATH, CONFIG, full_index=full_index)
 
 
-class TestSearchControllerIndexing(TestController):
+class TestSearchControllerIndexing(base.TestController):
     @classmethod
     def setup_class(cls):
         for reponame, init_or_fork, groupname in repos:
@@ -108,15 +108,15 @@
 
         rebuild_index(full_index=True) # rebuild fully for subsequent tests
 
-    @parametrize('reponame', [
-        (u'indexing_test'),
-        (u'indexing_test-fork'),
-        (u'group/indexing_test'),
-        (u'this-is-it'),
-        (u'*-fork'),
-        (u'group/*'),
+    @base.parametrize('reponame', [
+        ('indexing_test'),
+        ('indexing_test-fork'),
+        ('group/indexing_test'),
+        ('this-is-it'),
+        ('*-fork'),
+        ('group/*'),
     ])
-    @parametrize('searchtype,query,hit', [
+    @base.parametrize('searchtype,query,hit', [
         ('content', 'this_should_be_unique_content', 1),
         ('commit', 'this_should_be_unique_commit_log', 1),
         ('path', 'this_should_be_unique_filename.txt', 1),
@@ -125,17 +125,17 @@
         self.log_user()
 
         q = 'repository:%s %s' % (reponame, query)
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': q, 'type': searchtype})
         response.mustcontain('>%d results' % hit)
 
-    @parametrize('reponame', [
-        (u'indexing_test'),
-        (u'indexing_test-fork'),
-        (u'group/indexing_test'),
-        (u'this-is-it'),
+    @base.parametrize('reponame', [
+        ('indexing_test'),
+        ('indexing_test-fork'),
+        ('group/indexing_test'),
+        ('this-is-it'),
     ])
-    @parametrize('searchtype,query,hit', [
+    @base.parametrize('searchtype,query,hit', [
         ('content', 'this_should_be_unique_content', 1),
         ('commit', 'this_should_be_unique_commit_log', 1),
         ('path', 'this_should_be_unique_filename.txt', 1),
@@ -143,12 +143,12 @@
     def test_searching_under_repository(self, reponame, searchtype, query, hit):
         self.log_user()
 
-        response = self.app.get(url(controller='search', action='index',
+        response = self.app.get(base.url(controller='search', action='index',
                                     repo_name=reponame),
                                 {'q': query, 'type': searchtype})
         response.mustcontain('>%d results' % hit)
 
-    @parametrize('searchtype,query,hit', [
+    @base.parametrize('searchtype,query,hit', [
         ('content', 'path:this/is/it def test', 1),
         ('commit', 'added:this/is/it bother to ask where', 1),
         # this condition matches against files below, because
@@ -161,12 +161,12 @@
         ('path', 'extension:us', 1),
     ])
     def test_filename_stopword(self, searchtype, query, hit):
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': query, 'type': searchtype})
 
         response.mustcontain('>%d results' % hit)
 
-    @parametrize('searchtype,query,hit', [
+    @base.parametrize('searchtype,query,hit', [
         # matching against both 2 files
         ('content', 'owner:"this is it"', 0),
         ('content', 'owner:this-is-it', 0),
@@ -182,7 +182,7 @@
         ('commit', 'author:"this-is-it"', 1),
     ])
     def test_mailaddr_stopword(self, searchtype, query, hit):
-        response = self.app.get(url(controller='search', action='index'),
+        response = self.app.get(base.url(controller='search', action='index'),
                                 {'q': query, 'type': searchtype})
 
         response.mustcontain('>%d results' % hit)
--- a/kallithea/tests/functional/test_summary.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/functional/test_summary.py	Sat May 02 21:20:43 2020 +0200
@@ -18,7 +18,7 @@
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -32,14 +32,14 @@
         )
 
 
-class TestSummaryController(TestController):
+class TestSummaryController(base.TestController):
 
     def test_index_hg(self, custom_settings):
         self.log_user()
-        ID = Repository.get_by_repo_name(HG_REPO).repo_id
-        response = self.app.get(url(controller='summary',
+        ID = Repository.get_by_repo_name(base.HG_REPO).repo_id
+        response = self.app.get(base.url(controller='summary',
                                     action='index',
-                                    repo_name=HG_REPO))
+                                    repo_name=base.HG_REPO))
 
         # repo type
         response.mustcontain(
@@ -52,24 +52,24 @@
         # clone URLs
         response.mustcontain(
             '''<input class="form-control" size="80" readonly="readonly" value="http://%s@localhost:80/%s"/>''' %
-            (TEST_USER_ADMIN_LOGIN, HG_REPO)
+            (base.TEST_USER_ADMIN_LOGIN, base.HG_REPO)
         )
         response.mustcontain(
             '''<input class="form-control" size="80" readonly="readonly" value="http://%s@localhost:80/_%s"/>''' %
-            (TEST_USER_ADMIN_LOGIN, ID)
+            (base.TEST_USER_ADMIN_LOGIN, ID)
         )
         response.mustcontain(
             '''<input id="ssh_url" class="form-control" size="80" readonly="readonly" value="ssh://ssh_user@ssh_hostname/%s"/>''' %
-            (HG_REPO)
+            (base.HG_REPO)
         )
 
 
     def test_index_git(self, custom_settings):
         self.log_user()
-        ID = Repository.get_by_repo_name(GIT_REPO).repo_id
-        response = self.app.get(url(controller='summary',
+        ID = Repository.get_by_repo_name(base.GIT_REPO).repo_id
+        response = self.app.get(base.url(controller='summary',
                                     action='index',
-                                    repo_name=GIT_REPO))
+                                    repo_name=base.GIT_REPO))
 
         # repo type
         response.mustcontain(
@@ -82,21 +82,21 @@
         # clone URLs
         response.mustcontain(
             '''<input class="form-control" size="80" readonly="readonly" value="http://%s@localhost:80/%s"/>''' %
-            (TEST_USER_ADMIN_LOGIN, GIT_REPO)
+            (base.TEST_USER_ADMIN_LOGIN, base.GIT_REPO)
         )
         response.mustcontain(
             '''<input class="form-control" size="80" readonly="readonly" value="http://%s@localhost:80/_%s"/>''' %
-            (TEST_USER_ADMIN_LOGIN, ID)
+            (base.TEST_USER_ADMIN_LOGIN, ID)
         )
         response.mustcontain(
             '''<input id="ssh_url" class="form-control" size="80" readonly="readonly" value="ssh://ssh_user@ssh_hostname/%s"/>''' %
-            (GIT_REPO)
+            (base.GIT_REPO)
         )
 
     def test_index_by_id_hg(self):
         self.log_user()
-        ID = Repository.get_by_repo_name(HG_REPO).repo_id
-        response = self.app.get(url(controller='summary',
+        ID = Repository.get_by_repo_name(base.HG_REPO).repo_id
+        response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name='_%s' % ID))
 
@@ -111,21 +111,21 @@
 
     def test_index_by_repo_having_id_path_in_name_hg(self):
         self.log_user()
-        fixture.create_repo(name=u'repo_1')
-        response = self.app.get(url(controller='summary',
+        fixture.create_repo(name='repo_1')
+        response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name='repo_1'))
 
         try:
             response.mustcontain("repo_1")
         finally:
-            RepoModel().delete(Repository.get_by_repo_name(u'repo_1'))
+            RepoModel().delete(Repository.get_by_repo_name('repo_1'))
             Session().commit()
 
     def test_index_by_id_git(self):
         self.log_user()
-        ID = Repository.get_by_repo_name(GIT_REPO).repo_id
-        response = self.app.get(url(controller='summary',
+        ID = Repository.get_by_repo_name(base.GIT_REPO).repo_id
+        response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name='_%s' % ID))
 
@@ -146,14 +146,14 @@
     def test_index_trending(self):
         self.log_user()
         # codes stats
-        self._enable_stats(HG_REPO)
+        self._enable_stats(base.HG_REPO)
 
-        ScmModel().mark_for_invalidation(HG_REPO)
+        ScmModel().mark_for_invalidation(base.HG_REPO)
         # generate statistics first
-        response = self.app.get(url(controller='summary', action='statistics',
-                                    repo_name=HG_REPO))
-        response = self.app.get(url(controller='summary', action='index',
-                                    repo_name=HG_REPO))
+        response = self.app.get(base.url(controller='summary', action='statistics',
+                                    repo_name=base.HG_REPO))
+        response = self.app.get(base.url(controller='summary', action='index',
+                                    repo_name=base.HG_REPO))
         response.mustcontain(
             '[["py", {"count": 68, "desc": ["Python"]}], '
             '["rst", {"count": 16, "desc": ["Rst"]}], '
@@ -170,23 +170,23 @@
     def test_index_statistics(self):
         self.log_user()
         # codes stats
-        self._enable_stats(HG_REPO)
+        self._enable_stats(base.HG_REPO)
 
-        ScmModel().mark_for_invalidation(HG_REPO)
-        response = self.app.get(url(controller='summary', action='statistics',
-                                    repo_name=HG_REPO))
+        ScmModel().mark_for_invalidation(base.HG_REPO)
+        response = self.app.get(base.url(controller='summary', action='statistics',
+                                    repo_name=base.HG_REPO))
 
     def test_index_trending_git(self):
         self.log_user()
         # codes stats
-        self._enable_stats(GIT_REPO)
+        self._enable_stats(base.GIT_REPO)
 
-        ScmModel().mark_for_invalidation(GIT_REPO)
+        ScmModel().mark_for_invalidation(base.GIT_REPO)
         # generate statistics first
-        response = self.app.get(url(controller='summary', action='statistics',
-                                    repo_name=GIT_REPO))
-        response = self.app.get(url(controller='summary', action='index',
-                                    repo_name=GIT_REPO))
+        response = self.app.get(base.url(controller='summary', action='statistics',
+                                    repo_name=base.GIT_REPO))
+        response = self.app.get(base.url(controller='summary', action='index',
+                                    repo_name=base.GIT_REPO))
         response.mustcontain(
             '[["py", {"count": 68, "desc": ["Python"]}], '
             '["rst", {"count": 16, "desc": ["Rst"]}], '
@@ -203,8 +203,8 @@
     def test_index_statistics_git(self):
         self.log_user()
         # codes stats
-        self._enable_stats(GIT_REPO)
+        self._enable_stats(base.GIT_REPO)
 
-        ScmModel().mark_for_invalidation(GIT_REPO)
-        response = self.app.get(url(controller='summary', action='statistics',
-                                    repo_name=GIT_REPO))
+        ScmModel().mark_for_invalidation(base.GIT_REPO)
+        response = self.app.get(base.url(controller='summary', action='statistics',
+                                    repo_name=base.GIT_REPO))
--- a/kallithea/tests/models/common.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/common.py	Sat May 02 21:20:43 2020 +0200
@@ -12,7 +12,7 @@
 
 def _destroy_project_tree(test_u1_id):
     Session.remove()
-    repo_group = RepoGroup.get_by_group_name(group_name=u'g0')
+    repo_group = RepoGroup.get_by_group_name(group_name='g0')
     for el in reversed(repo_group.recursive_groups_and_repos()):
         if isinstance(el, Repository):
             RepoModel().delete(el)
@@ -50,21 +50,21 @@
 
     """
     test_u1 = UserModel().create_or_update(
-        username=u'test_u1', password=u'qweqwe',
-        email=u'test_u1@example.com', firstname=u'test_u1', lastname=u'test_u1'
+        username='test_u1', password='qweqwe',
+        email='test_u1@example.com', firstname='test_u1', lastname='test_u1'
     )
-    g0 = fixture.create_repo_group(u'g0')
-    g0_1 = fixture.create_repo_group(u'g0_1', parent_group_id=g0)
-    g0_1_1 = fixture.create_repo_group(u'g0_1_1', parent_group_id=g0_1)
-    g0_1_1_r1 = fixture.create_repo(u'g0/g0_1/g0_1_1/g0_1_1_r1', repo_group=g0_1_1)
-    g0_1_1_r2 = fixture.create_repo(u'g0/g0_1/g0_1_1/g0_1_1_r2', repo_group=g0_1_1)
-    g0_1_r1 = fixture.create_repo(u'g0/g0_1/g0_1_r1', repo_group=g0_1)
-    g0_2 = fixture.create_repo_group(u'g0_2', parent_group_id=g0)
-    g0_2_r1 = fixture.create_repo(u'g0/g0_2/g0_2_r1', repo_group=g0_2)
-    g0_2_r2 = fixture.create_repo(u'g0/g0_2/g0_2_r2', repo_group=g0_2)
-    g0_3 = fixture.create_repo_group(u'g0_3', parent_group_id=g0)
-    g0_3_r1 = fixture.create_repo(u'g0/g0_3/g0_3_r1', repo_group=g0_3)
-    g0_3_r2_private = fixture.create_repo(u'g0/g0_3/g0_3_r1_private',
+    g0 = fixture.create_repo_group('g0')
+    g0_1 = fixture.create_repo_group('g0_1', parent_group_id=g0)
+    g0_1_1 = fixture.create_repo_group('g0_1_1', parent_group_id=g0_1)
+    g0_1_1_r1 = fixture.create_repo('g0/g0_1/g0_1_1/g0_1_1_r1', repo_group=g0_1_1)
+    g0_1_1_r2 = fixture.create_repo('g0/g0_1/g0_1_1/g0_1_1_r2', repo_group=g0_1_1)
+    g0_1_r1 = fixture.create_repo('g0/g0_1/g0_1_r1', repo_group=g0_1)
+    g0_2 = fixture.create_repo_group('g0_2', parent_group_id=g0)
+    g0_2_r1 = fixture.create_repo('g0/g0_2/g0_2_r1', repo_group=g0_2)
+    g0_2_r2 = fixture.create_repo('g0/g0_2/g0_2_r2', repo_group=g0_2)
+    g0_3 = fixture.create_repo_group('g0_3', parent_group_id=g0)
+    g0_3_r1 = fixture.create_repo('g0/g0_3/g0_3_r1', repo_group=g0_3)
+    g0_3_r2_private = fixture.create_repo('g0/g0_3/g0_3_r1_private',
                                           repo_group=g0_3, repo_private=True)
     return test_u1
 
--- a/kallithea/tests/models/test_changeset_status.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_changeset_status.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,6 @@
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.db import ChangesetStatus as CS
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
 class CSM(object): # ChangesetStatusMock
@@ -9,12 +9,12 @@
         self.status = status
 
 
-class TestChangesetStatusCalculation(TestController):
+class TestChangesetStatusCalculation(base.TestController):
 
     def setup_method(self, method):
         self.m = ChangesetStatusModel()
 
-    @parametrize('name,expected_result,statuses', [
+    @base.parametrize('name,expected_result,statuses', [
         ('empty list', CS.STATUS_UNDER_REVIEW, []),
         ('approve', CS.STATUS_APPROVED, [CSM(CS.STATUS_APPROVED)]),
         ('approve2', CS.STATUS_APPROVED, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_APPROVED)]),
--- a/kallithea/tests/models/test_comments.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_comments.py	Sat May 02 21:20:43 2020 +0200
@@ -3,10 +3,10 @@
 
 from kallithea.model.comment import ChangesetCommentsModel
 from kallithea.model.db import Repository
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestComments(TestController):
+class TestComments(base.TestController):
 
     def _check_comment_count(self, repo_id, revision,
             expected_len_comments, expected_len_inline_comments,
@@ -23,17 +23,17 @@
 
     def test_create_delete_general_comment(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
+            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
                     expected_len_comments=0, expected_len_inline_comments=0)
 
-            text = u'a comment'
+            text = 'a comment'
             new_comment = ChangesetCommentsModel().create(
                     text=text,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     send_email=False)
 
@@ -47,19 +47,19 @@
 
     def test_create_delete_inline_comment(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
+            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
                     expected_len_comments=0, expected_len_inline_comments=0)
 
-            text = u'an inline comment'
-            f_path = u'vcs/tests/base.py'
-            line_no = u'n50'
+            text = 'an inline comment'
+            f_path = 'vcs/tests/base.py'
+            line_no = 'n50'
             new_comment = ChangesetCommentsModel().create(
                     text=text,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path,
                     line_no=line_no,
@@ -81,42 +81,42 @@
 
     def test_create_delete_multiple_inline_comments(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
+            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
                     expected_len_comments=0, expected_len_inline_comments=0)
 
-            text = u'an inline comment'
-            f_path = u'vcs/tests/base.py'
-            line_no = u'n50'
+            text = 'an inline comment'
+            f_path = 'vcs/tests/base.py'
+            line_no = 'n50'
             new_comment = ChangesetCommentsModel().create(
                     text=text,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path,
                     line_no=line_no,
                     send_email=False)
 
-            text2 = u'another inline comment, same file'
-            line_no2 = u'o41'
+            text2 = 'another inline comment, same file'
+            line_no2 = 'o41'
             new_comment2 = ChangesetCommentsModel().create(
                     text=text2,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path,
                     line_no=line_no2,
                     send_email=False)
 
-            text3 = u'another inline comment, same file'
-            f_path3 = u'vcs/tests/test_hg.py'
-            line_no3 = u'n159'
+            text3 = 'another inline comment, same file'
+            f_path3 = 'vcs/tests/test_hg.py'
+            line_no3 = 'n159'
             new_comment3 = ChangesetCommentsModel().create(
                     text=text3,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path3,
                     line_no=line_no3,
@@ -126,15 +126,15 @@
                     expected_len_comments=0, expected_len_inline_comments=2)
             # inline_comments is a list of tuples (file_path, dict)
             # where the dict keys are line numbers and values are lists of comments
-            assert inline_comments[1][0] == f_path
-            assert len(inline_comments[1][1]) == 2
-            assert inline_comments[1][1][line_no][0].text == text
-            assert inline_comments[1][1][line_no2][0].text == text2
+            assert inline_comments[0][0] == f_path
+            assert len(inline_comments[0][1]) == 2
+            assert inline_comments[0][1][line_no][0].text == text
+            assert inline_comments[0][1][line_no2][0].text == text2
 
-            assert inline_comments[0][0] == f_path3
-            assert len(inline_comments[0][1]) == 1
-            assert line_no3 in inline_comments[0][1]
-            assert inline_comments[0][1][line_no3][0].text == text3
+            assert inline_comments[1][0] == f_path3
+            assert len(inline_comments[1][1]) == 1
+            assert line_no3 in inline_comments[1][1]
+            assert inline_comments[1][1][line_no3][0].text == text3
 
             # now delete only one comment
             ChangesetCommentsModel().delete(new_comment2)
@@ -143,14 +143,14 @@
                     expected_len_comments=0, expected_len_inline_comments=2)
             # inline_comments is a list of tuples (file_path, dict)
             # where the dict keys are line numbers and values are lists of comments
-            assert inline_comments[1][0] == f_path
-            assert len(inline_comments[1][1]) == 1
-            assert inline_comments[1][1][line_no][0].text == text
+            assert inline_comments[0][0] == f_path
+            assert len(inline_comments[0][1]) == 1
+            assert inline_comments[0][1][line_no][0].text == text
 
-            assert inline_comments[0][0] == f_path3
-            assert len(inline_comments[0][1]) == 1
-            assert line_no3 in inline_comments[0][1]
-            assert inline_comments[0][1][line_no3][0].text == text3
+            assert inline_comments[1][0] == f_path3
+            assert len(inline_comments[1][1]) == 1
+            assert line_no3 in inline_comments[1][1]
+            assert inline_comments[1][1][line_no3][0].text == text3
 
             # now delete all others
             ChangesetCommentsModel().delete(new_comment)
@@ -161,42 +161,42 @@
 
     def test_selective_retrieval_of_inline_comments(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
+            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
                     expected_len_comments=0, expected_len_inline_comments=0)
 
-            text = u'an inline comment'
-            f_path = u'vcs/tests/base.py'
-            line_no = u'n50'
+            text = 'an inline comment'
+            f_path = 'vcs/tests/base.py'
+            line_no = 'n50'
             new_comment = ChangesetCommentsModel().create(
                     text=text,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path,
                     line_no=line_no,
                     send_email=False)
 
-            text2 = u'another inline comment, same file'
-            line_no2 = u'o41'
+            text2 = 'another inline comment, same file'
+            line_no2 = 'o41'
             new_comment2 = ChangesetCommentsModel().create(
                     text=text2,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path,
                     line_no=line_no2,
                     send_email=False)
 
-            text3 = u'another inline comment, same file'
-            f_path3 = u'vcs/tests/test_hg.py'
-            line_no3 = u'n159'
+            text3 = 'another inline comment, same file'
+            f_path3 = 'vcs/tests/test_hg.py'
+            line_no3 = 'n159'
             new_comment3 = ChangesetCommentsModel().create(
                     text=text3,
-                    repo=HG_REPO,
-                    author=TEST_USER_REGULAR_LOGIN,
+                    repo=base.HG_REPO,
+                    author=base.TEST_USER_REGULAR_LOGIN,
                     revision=revision,
                     f_path=f_path3,
                     line_no=line_no3,
--- a/kallithea/tests/models/test_diff_parsers.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_diff_parsers.py	Sat May 02 21:20:43 2020 +0200
@@ -1,5 +1,5 @@
 from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, COPIED_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE, DiffProcessor
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -271,9 +271,9 @@
 }
 
 
-class TestDiffLib(TestController):
+class TestDiffLib(base.TestController):
 
-    @parametrize('diff_fixture', DIFF_FIXTURES)
+    @base.parametrize('diff_fixture', DIFF_FIXTURES)
     def test_diff(self, diff_fixture):
         raw_diff = fixture.load_resource(diff_fixture, strip=False)
         vcs = 'hg'
@@ -295,20 +295,20 @@
             l.append('%(action)-7s %(new_lineno)3s %(old_lineno)3s %(line)r\n' % d)
         s = ''.join(l)
         assert s == r'''
-context ... ... u'@@ -51,6 +51,13 @@\n'
-unmod    51  51 u'<u>\t</u>begin();\n'
-unmod    52  52 u'<u>\t</u>\n'
-add      53     u'<u>\t</u>int foo;<u class="cr"></u>\n'
-add      54     u'<u>\t</u>int bar; <u class="cr"></u>\n'
-add      55     u'<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
-add      56     u'<u>\t</u>int space; <i></i>'
-add      57     u'<u>\t</u>int tab;<u>\t</u>\n'
-add      58     u'<u>\t</u>\n'
-unmod    59  53 u' <i></i>'
-del          54 u'<u>\t</u>#define MAX_STEPS (48)\n'
-add      60     u'<u>\t</u><u class="cr"></u>\n'
-add      61     u'<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
-unmod    62  55 u'\n'
-del          56 u'<u>\t</u>#define MIN_STEPS (<del>48</del>)\n'
-add      63     u'<u>\t</u>#define MIN_STEPS (<ins>42</ins>)\n'
+context ... ... '@@ -51,6 +51,13 @@\n'
+unmod    51  51 '<u>\t</u>begin();\n'
+unmod    52  52 '<u>\t</u>\n'
+add      53     '<u>\t</u>int foo;<u class="cr"></u>\n'
+add      54     '<u>\t</u>int bar; <u class="cr"></u>\n'
+add      55     '<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
+add      56     '<u>\t</u>int space; <i></i>'
+add      57     '<u>\t</u>int tab;<u>\t</u>\n'
+add      58     '<u>\t</u>\n'
+unmod    59  53 ' <i></i>'
+del          54 '<u>\t</u>#define MAX_STEPS (48)\n'
+add      60     '<u>\t</u><u class="cr"></u>\n'
+add      61     '<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
+unmod    62  55 '\n'
+del          56 '<u>\t</u>#define MIN_STEPS (<del>48</del>)\n'
+add      63     '<u>\t</u>#define MIN_STEPS (<ins>42</ins>)\n'
 '''
--- a/kallithea/tests/models/test_dump_html_mails.ref.html	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_dump_html_mails.ref.html	Sat May 02 21:20:43 2020 +0200
@@ -5,7 +5,7 @@
 <hr/>
 <h1>cs_comment, is_mention=False, status_change=None</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
 </pre>
@@ -164,7 +164,7 @@
 <hr/>
 <h1>cs_comment, is_mention=True, status_change=None</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
 </pre>
@@ -323,7 +323,7 @@
 <hr/>
 <h1>cs_comment, is_mention=False, status_change='Approved'</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
 </pre>
@@ -500,7 +500,7 @@
 <hr/>
 <h1>cs_comment, is_mention=True, status_change='Approved'</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
 </pre>
@@ -677,7 +677,7 @@
 <hr/>
 <h1>message</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: Test Message
 </pre>
@@ -748,7 +748,7 @@
 <hr/>
 <h1>registration</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: New user newbie registered
 </pre>
@@ -881,7 +881,7 @@
 <hr/>
 <h1>pull_request, is_mention=False</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1072,7 +1072,7 @@
 <hr/>
 <h1>pull_request, is_mention=True</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1263,7 +1263,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=False, status_change=None, closing_pr=False</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1430,7 +1430,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=True, status_change=None, closing_pr=False</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1597,7 +1597,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=False, status_change='Under Review', closing_pr=False</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Under Review: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1782,7 +1782,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=True, status_change='Under Review', closing_pr=False</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Under Review: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1967,7 +1967,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=False, status_change=None, closing_pr=True</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Closing: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -2151,7 +2151,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=True, status_change=None, closing_pr=True</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Closing: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -2335,7 +2335,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=False, status_change='Under Review', closing_pr=True</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Under Review, Closing: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -2525,7 +2525,7 @@
 <hr/>
 <h1>pull_request_comment, is_mention=True, status_change='Under Review', closing_pr=True</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Under Review, Closing: Comment] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -2715,7 +2715,7 @@
 <hr/>
 <h1>TYPE_PASSWORD_RESET</h1>
 <pre>
-From: u1
+From: u1 u1 <name@example.com>
 To: john@doe.com
 Subject: Password reset link
 </pre>
--- a/kallithea/tests/models/test_notifications.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_notifications.py	Sat May 02 21:20:43 2020 +0200
@@ -11,31 +11,31 @@
 from kallithea.model.meta import Session
 from kallithea.model.notification import EmailNotificationModel, NotificationModel
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-class TestNotifications(TestController):
+class TestNotifications(base.TestController):
 
     def setup_method(self, method):
         Session.remove()
-        u1 = UserModel().create_or_update(username=u'u1',
-                                        password=u'qweqwe',
-                                        email=u'u1@example.com',
-                                        firstname=u'u1', lastname=u'u1')
+        u1 = UserModel().create_or_update(username='u1',
+                                        password='qweqwe',
+                                        email='u1@example.com',
+                                        firstname='u1', lastname='u1')
         Session().commit()
         self.u1 = u1.user_id
 
-        u2 = UserModel().create_or_update(username=u'u2',
-                                        password=u'qweqwe',
-                                        email=u'u2@example.com',
-                                        firstname=u'u2', lastname=u'u3')
+        u2 = UserModel().create_or_update(username='u2',
+                                        password='qweqwe',
+                                        email='u2@example.com',
+                                        firstname='u2', lastname='u3')
         Session().commit()
         self.u2 = u2.user_id
 
-        u3 = UserModel().create_or_update(username=u'u3',
-                                        password=u'qweqwe',
-                                        email=u'u3@example.com',
-                                        firstname=u'u3', lastname=u'u3')
+        u3 = UserModel().create_or_update(username='u3',
+                                        password='qweqwe',
+                                        email='u3@example.com',
+                                        firstname='u3', lastname='u3')
         Session().commit()
         self.u3 = u3.user_id
 
@@ -43,15 +43,15 @@
         with test_context(self.app):
             usrs = [self.u1, self.u2]
 
-            def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
+            def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
                 assert recipients == ['u2@example.com']
                 assert subject == 'Test Message'
-                assert body == u"hi there"
+                assert body == "hi there"
                 assert '>hi there<' in html_body
-                assert author.username == 'u1'
+                assert from_name == 'u1 u1'
             with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
                 NotificationModel().create(created_by=self.u1,
-                                                   subject=u'subj', body=u'hi there',
+                                                   subject='subj', body='hi there',
                                                    recipients=usrs)
 
     @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items())))))
@@ -59,11 +59,11 @@
         # Exercise all notification types and dump them to one big html file
         l = []
 
-        def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
+        def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
             l.append('<hr/>\n')
             l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope
             l.append('<pre>\n')
-            l.append('From: %s\n' % author.username)
+            l.append('From: %s <name@example.com>\n' % from_name)
             l.append('To: %s\n' % ' '.join(recipients))
             l.append('Subject: %s\n' % subject)
             l.append('</pre>\n')
@@ -90,7 +90,7 @@
 
                 for type_, body, kwargs in [
                         (NotificationModel.TYPE_CHANGESET_COMMENT,
-                         u'This is the new \'comment\'.\n\n - and here it ends indented.',
+                         'This is the new \'comment\'.\n\n - and here it ends indented.',
                          dict(
                             short_id='cafe1234',
                             raw_id='cafe1234c0ffeecafe',
@@ -105,18 +105,18 @@
                             cs_url='http://changeset.com',
                             cs_author=User.get(self.u2))),
                         (NotificationModel.TYPE_MESSAGE,
-                         u'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.',
+                         'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.',
                          dict()),
                         #(NotificationModel.TYPE_MENTION, '$body', None), # not used
                         (NotificationModel.TYPE_REGISTRATION,
-                         u'Registration body',
+                         'Registration body',
                          dict(
                             new_username='newbie',
                             registered_user_url='http://newbie.org',
                             new_email='new@email.com',
                             new_full_name='New Full Name')),
                         (NotificationModel.TYPE_PULL_REQUEST,
-                         u'This PR is \'awesome\' because it does <stuff>\n - please approve indented!',
+                         'This PR is \'awesome\' because it does <stuff>\n - please approve indented!',
                          dict(
                             pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ...
                             is_mention=[False, True],
@@ -124,7 +124,7 @@
                             org_repo_name='repo_org',
                             **pr_kwargs)),
                         (NotificationModel.TYPE_PULL_REQUEST_COMMENT,
-                         u'Me too!\n\n - and indented on second line',
+                         'Me too!\n\n - and indented on second line',
                          dict(
                             closing_pr=[False, True],
                             is_mention=[False, True],
@@ -133,7 +133,7 @@
                             status_change=[None, 'Under Review'],
                             **pr_kwargs)),
                         ]:
-                    kwargs['repo_name'] = u'repo/name'
+                    kwargs['repo_name'] = 'repo/name'
                     params = [(type_, type_, body, kwargs)]
                     for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general
                         if not isinstance(kwargs.get(param_name), list):
@@ -149,7 +149,7 @@
                     for desc, type_, body, kwargs in params:
                         # desc is used as "global" variable
                         NotificationModel().create(created_by=self.u1,
-                                                           subject=u'unused', body=body, email_kwargs=kwargs,
+                                                           subject='unused', body=body, email_kwargs=kwargs,
                                                            recipients=[self.u2], type_=type_)
 
                 # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly:
@@ -159,7 +159,7 @@
                     "Password reset link",
                     EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs),
                     EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs),
-                    author=User.get(self.u1))
+                    from_name=User.get(self.u1).full_name_or_username)
 
         out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \
             re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l))
--- a/kallithea/tests/models/test_permissions.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_permissions.py	Sat May 02 21:20:43 2020 +0200
@@ -1,44 +1,45 @@
 from kallithea.lib.auth import AuthUser
-from kallithea.model.db import Permission, RepoGroup, User, UserGroupRepoGroupToPerm, UserToPerm
+from kallithea.model import db
+from kallithea.model.db import Permission, User, UserGroupRepoGroupToPerm, UserToPerm
 from kallithea.model.meta import Session
 from kallithea.model.permission import PermissionModel
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestPermissions(TestController):
+class TestPermissions(base.TestController):
 
     @classmethod
     def setup_class(cls):
         # recreate default user to get a clean start
-        PermissionModel().create_default_permissions(user=User.DEFAULT_USER,
+        PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME,
                                                      force=True)
         Session().commit()
 
     def setup_method(self, method):
         self.u1 = UserModel().create_or_update(
-            username=u'u1', password=u'qweqwe',
-            email=u'u1@example.com', firstname=u'u1', lastname=u'u1'
+            username='u1', password='qweqwe',
+            email='u1@example.com', firstname='u1', lastname='u1'
         )
         self.u2 = UserModel().create_or_update(
-            username=u'u2', password=u'qweqwe',
-            email=u'u2@example.com', firstname=u'u2', lastname=u'u2'
+            username='u2', password='qweqwe',
+            email='u2@example.com', firstname='u2', lastname='u2'
         )
         self.u3 = UserModel().create_or_update(
-            username=u'u3', password=u'qweqwe',
-            email=u'u3@example.com', firstname=u'u3', lastname=u'u3'
+            username='u3', password='qweqwe',
+            email='u3@example.com', firstname='u3', lastname='u3'
         )
         self.anon = User.get_default_user()
         self.a1 = UserModel().create_or_update(
-            username=u'a1', password=u'qweqwe',
-            email=u'a1@example.com', firstname=u'a1', lastname=u'a1', admin=True
+            username='a1', password='qweqwe',
+            email='a1@example.com', firstname='a1', lastname='a1', admin=True
         )
         Session().commit()
 
@@ -71,88 +72,88 @@
             'repositories_groups': {},
             'global': set(['hg.create.repository', 'repository.read',
                            'hg.register.manual_activate']),
-            'repositories': {HG_REPO: 'repository.read'}
+            'repositories': {base.HG_REPO: 'repository.read'}
         }
-        assert u1_auth.permissions['repositories'][HG_REPO] == perms['repositories'][HG_REPO]
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == perms['repositories'][base.HG_REPO]
         new_perm = 'repository.write'
-        RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1,
+        RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1,
                                           perm=new_perm)
         Session().commit()
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][HG_REPO] == new_perm
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm
 
     def test_default_admin_perms_set(self):
         a1_auth = AuthUser(user_id=self.a1.user_id)
         perms = {
             'repositories_groups': {},
             'global': set(['hg.admin', 'hg.create.write_on_repogroup.true']),
-            'repositories': {HG_REPO: 'repository.admin'}
+            'repositories': {base.HG_REPO: 'repository.admin'}
         }
-        assert a1_auth.permissions['repositories'][HG_REPO] == perms['repositories'][HG_REPO]
+        assert a1_auth.permissions['repositories'][base.HG_REPO] == perms['repositories'][base.HG_REPO]
         new_perm = 'repository.write'
-        RepoModel().grant_user_permission(repo=HG_REPO, user=self.a1,
+        RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.a1,
                                           perm=new_perm)
         Session().commit()
         # cannot really downgrade admins permissions !? they still gets set as
         # admin !
         u1_auth = AuthUser(user_id=self.a1.user_id)
-        assert u1_auth.permissions['repositories'][HG_REPO] == perms['repositories'][HG_REPO]
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == perms['repositories'][base.HG_REPO]
 
     def test_default_group_perms(self):
-        self.g1 = fixture.create_repo_group(u'test1', skip_if_exists=True)
-        self.g2 = fixture.create_repo_group(u'test2', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
+        self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
         u1_auth = AuthUser(user_id=self.u1.user_id)
         perms = {
-            'repositories_groups': {u'test1': 'group.read', u'test2': 'group.read'},
+            'repositories_groups': {'test1': 'group.read', 'test2': 'group.read'},
             'global': set(Permission.DEFAULT_USER_PERMISSIONS),
-            'repositories': {HG_REPO: 'repository.read'}
+            'repositories': {base.HG_REPO: 'repository.read'}
         }
-        assert u1_auth.permissions['repositories'][HG_REPO] == perms['repositories'][HG_REPO]
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == perms['repositories'][base.HG_REPO]
         assert u1_auth.permissions['repositories_groups'] == perms['repositories_groups']
         assert u1_auth.permissions['global'] == perms['global']
 
     def test_default_admin_group_perms(self):
-        self.g1 = fixture.create_repo_group(u'test1', skip_if_exists=True)
-        self.g2 = fixture.create_repo_group(u'test2', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
+        self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
         a1_auth = AuthUser(user_id=self.a1.user_id)
         perms = {
-            'repositories_groups': {u'test1': 'group.admin', u'test2': 'group.admin'},
+            'repositories_groups': {'test1': 'group.admin', 'test2': 'group.admin'},
             'global': set(['hg.admin', 'hg.create.write_on_repogroup.true']),
-            'repositories': {HG_REPO: 'repository.admin'}
+            'repositories': {base.HG_REPO: 'repository.admin'}
         }
 
-        assert a1_auth.permissions['repositories'][HG_REPO] == perms['repositories'][HG_REPO]
+        assert a1_auth.permissions['repositories'][base.HG_REPO] == perms['repositories'][base.HG_REPO]
         assert a1_auth.permissions['repositories_groups'] == perms['repositories_groups']
 
     def test_propagated_permission_from_users_group_by_explicit_perms_exist(self):
         # make group
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
 
         # set user permission none
-        RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm='repository.none')
+        RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1, perm='repository.none')
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][HG_REPO] == 'repository.read' # inherit from default user
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.read' # inherit from default user
 
         # grant perm for group this should override permission from user
-        RepoModel().grant_user_group_permission(repo=HG_REPO,
+        RepoModel().grant_user_group_permission(repo=base.HG_REPO,
                                                  group_name=self.ug1,
                                                  perm='repository.write')
 
         # verify that user group permissions win
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][HG_REPO] == 'repository.write'
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.write'
 
     def test_propagated_permission_from_users_group(self):
         # make group
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         UserGroupModel().add_user_to_group(self.ug1, self.u3)
 
         # grant perm for group this should override default permission from user
         new_perm_gr = 'repository.write'
-        RepoModel().grant_user_group_permission(repo=HG_REPO,
+        RepoModel().grant_user_group_permission(repo=base.HG_REPO,
                                                  group_name=self.ug1,
                                                  perm=new_perm_gr)
         # check perms
@@ -161,29 +162,29 @@
             'repositories_groups': {},
             'global': set(['hg.create.repository', 'repository.read',
                            'hg.register.manual_activate']),
-            'repositories': {HG_REPO: 'repository.read'}
+            'repositories': {base.HG_REPO: 'repository.read'}
         }
-        assert u3_auth.permissions['repositories'][HG_REPO] == new_perm_gr
+        assert u3_auth.permissions['repositories'][base.HG_REPO] == new_perm_gr
         assert u3_auth.permissions['repositories_groups'] == perms['repositories_groups']
 
     def test_propagated_permission_from_users_group_lower_weight(self):
         # make group
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         # add user to group
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
 
         # set permission to lower
         new_perm_h = 'repository.write'
-        RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1,
+        RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1,
                                           perm=new_perm_h)
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][HG_REPO] == new_perm_h
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm_h
 
         # grant perm for group this should NOT override permission from user
         # since it's lower than granted
         new_perm_l = 'repository.read'
-        RepoModel().grant_user_group_permission(repo=HG_REPO,
+        RepoModel().grant_user_group_permission(repo=base.HG_REPO,
                                                  group_name=self.ug1,
                                                  perm=new_perm_l)
         # check perms
@@ -192,20 +193,20 @@
             'repositories_groups': {},
             'global': set(['hg.create.repository', 'repository.read',
                            'hg.register.manual_activate']),
-            'repositories': {HG_REPO: 'repository.write'}
+            'repositories': {base.HG_REPO: 'repository.write'}
         }
-        assert u1_auth.permissions['repositories'][HG_REPO] == new_perm_h
+        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm_h
         assert u1_auth.permissions['repositories_groups'] == perms['repositories_groups']
 
     def test_repo_in_group_permissions(self):
-        self.g1 = fixture.create_repo_group(u'group1', skip_if_exists=True)
-        self.g2 = fixture.create_repo_group(u'group2', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
+        self.g2 = fixture.create_repo_group('group2', skip_if_exists=True)
         # both perms should be read !
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.read', u'group2': u'group.read'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.read', 'group2': 'group.read'}
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.read', u'group2': u'group.read'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.read', 'group2': 'group.read'}
 
         # Change perms to none for both groups
         RepoGroupModel().grant_user_permission(repo_group=self.g1,
@@ -216,23 +217,23 @@
                                                perm='group.none')
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
         # add repo to group
-        name = RepoGroup.url_sep().join([self.g1.group_name, 'test_perm'])
+        name = db.URL_SEP.join([self.g1.group_name, 'test_perm'])
         self.test_repo = fixture.create_repo(name=name,
                                              repo_type='hg',
                                              repo_group=self.g1,
                                              cur_user=self.u1,)
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
         # grant permission for u2 !
         RepoGroupModel().grant_user_permission(repo_group=self.g1, user=self.u2,
@@ -243,27 +244,27 @@
         assert self.u1 != self.u2
         # u1 and anon should have not change perms while u2 should !
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
         u2_auth = AuthUser(user_id=self.u2.user_id)
-        assert u2_auth.permissions['repositories_groups'] == {u'group1': u'group.read', u'group2': u'group.read'}
+        assert u2_auth.permissions['repositories_groups'] == {'group1': 'group.read', 'group2': 'group.read'}
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.none', u'group2': u'group.none'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.none', 'group2': 'group.none'}
 
     def test_repo_group_user_as_user_group_member(self):
         # create Group1
-        self.g1 = fixture.create_repo_group(u'group1', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
         a1_auth = AuthUser(user_id=self.anon.user_id)
 
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.read'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.read'}
 
         # set default permission to none
         RepoGroupModel().grant_user_permission(repo_group=self.g1,
                                                user=self.anon,
                                                perm='group.none')
         # make group
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         # add user to group
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
         Session().commit()
@@ -275,10 +276,10 @@
 
         # check his permissions
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.none'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.none'}
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.none'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.none'}
 
         # grant ug1 read permissions for
         RepoGroupModel().grant_user_group_permission(repo_group=self.g1,
@@ -294,10 +295,10 @@
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
 
-        assert a1_auth.permissions['repositories_groups'] == {u'group1': u'group.none'}
+        assert a1_auth.permissions['repositories_groups'] == {'group1': 'group.none'}
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.read'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.read'}
 
     def test_inherit_nice_permissions_from_default_user(self):
         user_model = UserModel()
@@ -388,7 +389,7 @@
     def test_inactive_user_group_does_not_affect_global_permissions(self):
         # Add user to inactive user group, set specific permissions on user
         # group and and verify it really is inactive.
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
@@ -420,7 +421,7 @@
     def test_inactive_user_group_does_not_affect_global_permissions_inverse(self):
         # Add user to inactive user group, set specific permissions on user
         # group and and verify it really is inactive.
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
@@ -450,14 +451,14 @@
                               'hg.create.write_on_repogroup.true'])
 
     def test_inactive_user_group_does_not_affect_repo_permissions(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
         # note: make u2 repo owner rather than u1, because the owner always has
         # admin permissions
-        self.test_repo = fixture.create_repo(name=u'myownrepo',
+        self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
                                              cur_user=self.u2)
 
@@ -474,14 +475,14 @@
         assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.write'
 
     def test_inactive_user_group_does_not_affect_repo_permissions_inverse(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
         # note: make u2 repo owner rather than u1, because the owner always has
         # admin permissions
-        self.test_repo = fixture.create_repo(name=u'myownrepo',
+        self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
                                              cur_user=self.u2)
 
@@ -498,12 +499,12 @@
         assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
 
     def test_inactive_user_group_does_not_affect_repo_group_permissions(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
-        self.g1 = fixture.create_repo_group(u'group1', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
 
         # enable admin access for user group on repo group
         RepoGroupModel().grant_user_group_permission(self.g1,
@@ -515,15 +516,15 @@
                                                perm='group.write')
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.write'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.write'}
 
     def test_inactive_user_group_does_not_affect_repo_group_permissions_inverse(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
-        self.g1 = fixture.create_repo_group(u'group1', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
 
         # enable only write access for user group on repo group
         RepoGroupModel().grant_user_group_permission(self.g1,
@@ -535,15 +536,15 @@
                                                perm='group.admin')
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'] == {u'group1': u'group.admin'}
+        assert u1_auth.permissions['repositories_groups'] == {'group1': 'group.admin'}
 
     def test_inactive_user_group_does_not_affect_user_group_permissions(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
-        self.ug2 = fixture.create_user_group(u'G2')
+        self.ug2 = fixture.create_user_group('G2')
 
         # enable admin access for user group on user group
         UserGroupModel().grant_user_group_permission(self.ug2,
@@ -555,16 +556,16 @@
                                                perm='usergroup.write')
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['user_groups'][u'G1'] == u'usergroup.read'
-        assert u1_auth.permissions['user_groups'][u'G2'] == u'usergroup.write'
+        assert u1_auth.permissions['user_groups']['G1'] == 'usergroup.read'
+        assert u1_auth.permissions['user_groups']['G2'] == 'usergroup.write'
 
     def test_inactive_user_group_does_not_affect_user_group_permissions_inverse(self):
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         user_group_model = UserGroupModel()
         user_group_model.add_user_to_group(self.ug1, self.u1)
         user_group_model.update(self.ug1, {'users_group_active': False})
 
-        self.ug2 = fixture.create_user_group(u'G2')
+        self.ug2 = fixture.create_user_group('G2')
 
         # enable only write access for user group on user group
         UserGroupModel().grant_user_group_permission(self.ug2,
@@ -576,12 +577,12 @@
                                                perm='usergroup.admin')
         Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['user_groups'][u'G1'] == u'usergroup.read'
-        assert u1_auth.permissions['user_groups'][u'G2'] == u'usergroup.admin'
+        assert u1_auth.permissions['user_groups']['G1'] == 'usergroup.read'
+        assert u1_auth.permissions['user_groups']['G2'] == 'usergroup.admin'
 
     def test_owner_permissions_doesnot_get_overwritten_by_group(self):
         # create repo as USER,
-        self.test_repo = fixture.create_repo(name=u'myownrepo',
+        self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
                                              cur_user=self.u1)
 
@@ -589,7 +590,7 @@
         u1_auth = AuthUser(user_id=self.u1.user_id)
         assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
         # set his permission as user group, he should still be admin
-        self.ug1 = fixture.create_user_group(u'G1')
+        self.ug1 = fixture.create_user_group('G1')
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
         RepoModel().grant_user_group_permission(self.test_repo,
                                                  group_name=self.ug1,
@@ -601,7 +602,7 @@
 
     def test_owner_permissions_doesnot_get_overwritten_by_others(self):
         # create repo as USER,
-        self.test_repo = fixture.create_repo(name=u'myownrepo',
+        self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
                                              cur_user=self.u1)
 
@@ -641,7 +642,7 @@
         PermissionModel().create_default_permissions(user=self.u1)
         self._test_def_perm_equal(user=self.u1)
 
-    @parametrize('perm,modify_to', [
+    @base.parametrize('perm,modify_to', [
         ('repository.read', 'repository.none'),
         ('group.read', 'group.none'),
         ('usergroup.read', 'usergroup.none'),
--- a/kallithea/tests/models/test_repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -3,18 +3,19 @@
 import pytest
 from sqlalchemy.exc import IntegrityError
 
+from kallithea.model import db
 from kallithea.model.db import RepoGroup
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-def _update_repo_group(id_, group_name, desc=u'desc', parent_id=None):
+def _update_repo_group(id_, group_name, desc='desc', parent_id=None):
     form_data = dict(
         group_name=group_name,
         group_description=desc,
@@ -34,12 +35,12 @@
     return r
 
 
-class TestRepoGroups(TestController):
+class TestRepoGroups(base.TestController):
 
     def setup_method(self, method):
-        self.g1 = fixture.create_repo_group(u'test1', skip_if_exists=True)
-        self.g2 = fixture.create_repo_group(u'test2', skip_if_exists=True)
-        self.g3 = fixture.create_repo_group(u'test3', skip_if_exists=True)
+        self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
+        self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
+        self.g3 = fixture.create_repo_group('test3', skip_if_exists=True)
 
     def teardown_method(self, method):
         Session.remove()
@@ -48,7 +49,7 @@
         """
         Checks the path for existence !
         """
-        path = [TESTS_TMP_PATH] + list(path)
+        path = [base.TESTS_TMP_PATH] + list(path)
         path = os.path.join(*path)
         return os.path.isdir(path)
 
@@ -56,7 +57,7 @@
         RepoGroupModel().delete(id_)
 
     def test_create_group(self):
-        g = fixture.create_repo_group(u'newGroup')
+        g = fixture.create_repo_group('newGroup')
         Session().commit()
         assert g.full_path == 'newGroup'
 
@@ -64,85 +65,84 @@
 
     def test_create_same_name_group(self):
         with pytest.raises(IntegrityError):
-            fixture.create_repo_group(u'newGroup')
+            fixture.create_repo_group('newGroup')
         Session().rollback()
 
     def test_same_subgroup(self):
-        sg1 = fixture.create_repo_group(u'sub1', parent_group_id=self.g1.group_id)
+        sg1 = fixture.create_repo_group('sub1', parent_group_id=self.g1.group_id)
         assert sg1.parent_group == self.g1
         assert sg1.full_path == 'test1/sub1'
         assert self.__check_path('test1', 'sub1')
 
-        ssg1 = fixture.create_repo_group(u'subsub1', parent_group_id=sg1.group_id)
+        ssg1 = fixture.create_repo_group('subsub1', parent_group_id=sg1.group_id)
         assert ssg1.parent_group == sg1
         assert ssg1.full_path == 'test1/sub1/subsub1'
         assert self.__check_path('test1', 'sub1', 'subsub1')
 
     def test_remove_group(self):
-        sg1 = fixture.create_repo_group(u'deleteme')
+        sg1 = fixture.create_repo_group('deleteme')
         self.__delete_group(sg1.group_id)
 
         assert RepoGroup.get(sg1.group_id) is None
         assert not self.__check_path('deteteme')
 
-        sg1 = fixture.create_repo_group(u'deleteme', parent_group_id=self.g1.group_id)
+        sg1 = fixture.create_repo_group('deleteme', parent_group_id=self.g1.group_id)
         self.__delete_group(sg1.group_id)
 
         assert RepoGroup.get(sg1.group_id) is None
         assert not self.__check_path('test1', 'deteteme')
 
     def test_rename_single_group(self):
-        sg1 = fixture.create_repo_group(u'initial')
+        sg1 = fixture.create_repo_group('initial')
 
-        new_sg1 = _update_repo_group(sg1.group_id, u'after')
+        new_sg1 = _update_repo_group(sg1.group_id, 'after')
         assert self.__check_path('after')
-        assert RepoGroup.get_by_group_name(u'initial') is None
+        assert RepoGroup.get_by_group_name('initial') is None
 
     def test_update_group_parent(self):
 
-        sg1 = fixture.create_repo_group(u'initial', parent_group_id=self.g1.group_id)
+        sg1 = fixture.create_repo_group('initial', parent_group_id=self.g1.group_id)
 
-        new_sg1 = _update_repo_group(sg1.group_id, u'after', parent_id=self.g1.group_id)
+        new_sg1 = _update_repo_group(sg1.group_id, 'after', parent_id=self.g1.group_id)
         assert self.__check_path('test1', 'after')
-        assert RepoGroup.get_by_group_name(u'test1/initial') is None
+        assert RepoGroup.get_by_group_name('test1/initial') is None
 
-        new_sg1 = _update_repo_group(sg1.group_id, u'after', parent_id=self.g3.group_id)
+        new_sg1 = _update_repo_group(sg1.group_id, 'after', parent_id=self.g3.group_id)
         assert self.__check_path('test3', 'after')
-        assert RepoGroup.get_by_group_name(u'test3/initial') == None
+        assert RepoGroup.get_by_group_name('test3/initial') == None
 
-        new_sg1 = _update_repo_group(sg1.group_id, u'hello')
+        new_sg1 = _update_repo_group(sg1.group_id, 'hello')
         assert self.__check_path('hello')
 
-        assert RepoGroup.get_by_group_name(u'hello') == new_sg1
+        assert RepoGroup.get_by_group_name('hello') == new_sg1
 
     def test_subgrouping_with_repo(self):
 
-        g1 = fixture.create_repo_group(u'g1')
-        g2 = fixture.create_repo_group(u'g2')
+        g1 = fixture.create_repo_group('g1')
+        g2 = fixture.create_repo_group('g2')
         # create new repo
-        r = fixture.create_repo(u'john')
+        r = fixture.create_repo('john')
 
         assert r.repo_name == 'john'
         # put repo into group
-        r = _update_repo(u'john', repo_group=g1.group_id)
+        r = _update_repo('john', repo_group=g1.group_id)
         Session().commit()
         assert r.repo_name == 'g1/john'
 
-        _update_repo_group(g1.group_id, u'g1', parent_id=g2.group_id)
+        _update_repo_group(g1.group_id, 'g1', parent_id=g2.group_id)
         assert self.__check_path('g2', 'g1')
 
         # test repo
-        assert r.repo_name == RepoGroup.url_sep().join(['g2', 'g1',
-                                                                r.just_name])
+        assert r.repo_name == db.URL_SEP.join(['g2', 'g1', r.just_name])
 
     def test_move_to_root(self):
-        g1 = fixture.create_repo_group(u't11')
-        g2 = fixture.create_repo_group(u't22', parent_group_id=g1.group_id)
+        g1 = fixture.create_repo_group('t11')
+        g2 = fixture.create_repo_group('t22', parent_group_id=g1.group_id)
 
         assert g2.full_path == 't11/t22'
         assert self.__check_path('t11', 't22')
 
-        g2 = _update_repo_group(g2.group_id, u'g22', parent_id=None)
+        g2 = _update_repo_group(g2.group_id, 'g22', parent_id=None)
         Session().commit()
 
         assert g2.group_name == 'g22'
@@ -152,14 +152,14 @@
         assert self.__check_path('g22')
 
     def test_rename_top_level_group_in_nested_setup(self):
-        g1 = fixture.create_repo_group(u'L1')
-        g2 = fixture.create_repo_group(u'L2', parent_group_id=g1.group_id)
-        g3 = fixture.create_repo_group(u'L3', parent_group_id=g2.group_id)
+        g1 = fixture.create_repo_group('L1')
+        g2 = fixture.create_repo_group('L2', parent_group_id=g1.group_id)
+        g3 = fixture.create_repo_group('L3', parent_group_id=g2.group_id)
 
-        r = fixture.create_repo(u'L1/L2/L3/L3_REPO', repo_group=g3.group_id)
+        r = fixture.create_repo('L1/L2/L3/L3_REPO', repo_group=g3.group_id)
 
         ## rename L1 all groups should be now changed
-        _update_repo_group(g1.group_id, u'L1_NEW')
+        _update_repo_group(g1.group_id, 'L1_NEW')
         Session().commit()
         assert g1.full_path == 'L1_NEW'
         assert g2.full_path == 'L1_NEW/L2'
@@ -167,14 +167,14 @@
         assert r.repo_name == 'L1_NEW/L2/L3/L3_REPO'
 
     def test_change_parent_of_top_level_group_in_nested_setup(self):
-        g1 = fixture.create_repo_group(u'R1')
-        g2 = fixture.create_repo_group(u'R2', parent_group_id=g1.group_id)
-        g3 = fixture.create_repo_group(u'R3', parent_group_id=g2.group_id)
-        g4 = fixture.create_repo_group(u'R1_NEW')
+        g1 = fixture.create_repo_group('R1')
+        g2 = fixture.create_repo_group('R2', parent_group_id=g1.group_id)
+        g3 = fixture.create_repo_group('R3', parent_group_id=g2.group_id)
+        g4 = fixture.create_repo_group('R1_NEW')
 
-        r = fixture.create_repo(u'R1/R2/R3/R3_REPO', repo_group=g3.group_id)
+        r = fixture.create_repo('R1/R2/R3/R3_REPO', repo_group=g3.group_id)
         ## rename L1 all groups should be now changed
-        _update_repo_group(g1.group_id, u'R1', parent_id=g4.group_id)
+        _update_repo_group(g1.group_id, 'R1', parent_id=g4.group_id)
         Session().commit()
         assert g1.full_path == 'R1_NEW/R1'
         assert g2.full_path == 'R1_NEW/R1/R2'
@@ -182,15 +182,15 @@
         assert r.repo_name == 'R1_NEW/R1/R2/R3/R3_REPO'
 
     def test_change_parent_of_top_level_group_in_nested_setup_with_rename(self):
-        g1 = fixture.create_repo_group(u'X1')
-        g2 = fixture.create_repo_group(u'X2', parent_group_id=g1.group_id)
-        g3 = fixture.create_repo_group(u'X3', parent_group_id=g2.group_id)
-        g4 = fixture.create_repo_group(u'X1_NEW')
+        g1 = fixture.create_repo_group('X1')
+        g2 = fixture.create_repo_group('X2', parent_group_id=g1.group_id)
+        g3 = fixture.create_repo_group('X3', parent_group_id=g2.group_id)
+        g4 = fixture.create_repo_group('X1_NEW')
 
-        r = fixture.create_repo(u'X1/X2/X3/X3_REPO', repo_group=g3.group_id)
+        r = fixture.create_repo('X1/X2/X3/X3_REPO', repo_group=g3.group_id)
 
         ## rename L1 all groups should be now changed
-        _update_repo_group(g1.group_id, u'X1_PRIM', parent_id=g4.group_id)
+        _update_repo_group(g1.group_id, 'X1_PRIM', parent_id=g4.group_id)
         Session().commit()
         assert g1.full_path == 'X1_NEW/X1_PRIM'
         assert g2.full_path == 'X1_NEW/X1_PRIM/X2'
--- a/kallithea/tests/models/test_repos.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_repos.py	Sat May 02 21:20:43 2020 +0200
@@ -4,78 +4,78 @@
 from kallithea.model.db import Repository
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestRepos(TestController):
+class TestRepos(base.TestController):
 
     def teardown_method(self, method):
         Session.remove()
 
     def test_remove_repo(self):
-        repo = fixture.create_repo(name=u'test-repo-1')
+        repo = fixture.create_repo(name='test-repo-1')
         Session().commit()
 
         RepoModel().delete(repo=repo)
         Session().commit()
 
-        assert Repository.get_by_repo_name(repo_name=u'test-repo-1') is None
+        assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
 
     def test_remove_repo_repo_raises_exc_when_attached_forks(self):
-        repo = fixture.create_repo(name=u'test-repo-1')
+        repo = fixture.create_repo(name='test-repo-1')
         Session().commit()
 
-        fixture.create_fork(repo.repo_name, u'test-repo-fork-1')
+        fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
         Session().commit()
 
         with pytest.raises(AttachedForksError):
             RepoModel().delete(repo=repo)
         # cleanup
-        RepoModel().delete(repo=u'test-repo-fork-1')
-        RepoModel().delete(repo=u'test-repo-1')
+        RepoModel().delete(repo='test-repo-fork-1')
+        RepoModel().delete(repo='test-repo-1')
         Session().commit()
 
     def test_remove_repo_delete_forks(self):
-        repo = fixture.create_repo(name=u'test-repo-1')
+        repo = fixture.create_repo(name='test-repo-1')
         Session().commit()
 
-        fork = fixture.create_fork(repo.repo_name, u'test-repo-fork-1')
+        fork = fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
         Session().commit()
 
         # fork of fork
-        fixture.create_fork(fork.repo_name, u'test-repo-fork-fork-1')
+        fixture.create_fork(fork.repo_name, 'test-repo-fork-fork-1')
         Session().commit()
 
         RepoModel().delete(repo=repo, forks='delete')
         Session().commit()
 
-        assert Repository.get_by_repo_name(repo_name=u'test-repo-1') is None
-        assert Repository.get_by_repo_name(repo_name=u'test-repo-fork-1') is None
-        assert Repository.get_by_repo_name(repo_name=u'test-repo-fork-fork-1') is None
+        assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
+        assert Repository.get_by_repo_name(repo_name='test-repo-fork-1') is None
+        assert Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is None
 
     def test_remove_repo_detach_forks(self):
-        repo = fixture.create_repo(name=u'test-repo-1')
+        repo = fixture.create_repo(name='test-repo-1')
         Session().commit()
 
-        fork = fixture.create_fork(repo.repo_name, u'test-repo-fork-1')
+        fork = fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
         Session().commit()
 
         # fork of fork
-        fixture.create_fork(fork.repo_name, u'test-repo-fork-fork-1')
+        fixture.create_fork(fork.repo_name, 'test-repo-fork-fork-1')
         Session().commit()
 
         RepoModel().delete(repo=repo, forks='detach')
         Session().commit()
 
         try:
-            assert Repository.get_by_repo_name(repo_name=u'test-repo-1') is None
-            assert Repository.get_by_repo_name(repo_name=u'test-repo-fork-1') is not None
-            assert Repository.get_by_repo_name(repo_name=u'test-repo-fork-fork-1') is not None
+            assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
+            assert Repository.get_by_repo_name(repo_name='test-repo-fork-1') is not None
+            assert Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is not None
         finally:
-            RepoModel().delete(repo=u'test-repo-fork-fork-1')
-            RepoModel().delete(repo=u'test-repo-fork-1')
+            RepoModel().delete(repo='test-repo-fork-fork-1')
+            RepoModel().delete(repo='test-repo-fork-1')
             Session().commit()
--- a/kallithea/tests/models/test_settings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_settings.py	Sat May 02 21:20:43 2020 +0200
@@ -35,12 +35,12 @@
     setting = Setting.create_or_update(name, 'spam', type='list')
     Session().flush() # must flush so we can delete it below
     try:
-        assert setting.app_settings_value == [u'spam']
+        assert setting.app_settings_value == ['spam']
         # Assign back setting value.
         setting.app_settings_value = setting.app_settings_value
         # Quirk: value is stringified on write and listified on read.
-        assert setting.app_settings_value == ["[u'spam']"]
+        assert setting.app_settings_value == ["['spam']"]
         setting.app_settings_value = setting.app_settings_value
-        assert setting.app_settings_value == ["[u\"[u'spam']\"]"]
+        assert setting.app_settings_value == ["[\"['spam']\"]"]
     finally:
         Session().delete(setting)
--- a/kallithea/tests/models/test_user_group_permissions_on_repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_user_group_permissions_on_repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -16,7 +16,7 @@
 _get_group_perms = None
 
 
-def permissions_setup_func(group_name=u'g0', perm='group.read', recursive='all'):
+def permissions_setup_func(group_name='g0', perm='group.read', recursive='all'):
     """
     Resets all permissions to perm attribute
     """
@@ -43,7 +43,7 @@
     Session().commit()
     test_u2_id = test_u2.user_id
 
-    gr1 = fixture.create_user_group(u'perms_group_1')
+    gr1 = fixture.create_user_group('perms_group_1')
     Session().commit()
     test_u2_gr_id = gr1.users_group_id
     UserGroupModel().add_user_to_group(gr1, user=test_u2_id)
@@ -57,13 +57,13 @@
 
 def teardown_module():
     _destroy_project_tree(test_u2_id)
-    fixture.destroy_user_group(u'perms_group_1')
+    fixture.destroy_user_group('perms_group_1')
 
 
 def test_user_permissions_on_group_without_recursive_mode():
     # set permission to g0 non-recursive mode
     recursive = 'none'
-    group = u'g0'
+    group = 'g0'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     items = [x for x in _get_repo_perms(group, recursive)]
@@ -82,7 +82,7 @@
 def test_user_permissions_on_group_without_recursive_mode_subgroup():
     # set permission to g0 non-recursive mode
     recursive = 'none'
-    group = u'g0/g0_1'
+    group = 'g0/g0_1'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     items = [x for x in _get_repo_perms(group, recursive)]
@@ -103,7 +103,7 @@
     # set permission to g0 recursive mode, all children including
     # other repos and groups should have this permission now set !
     recursive = 'all'
-    group = u'g0'
+    group = 'g0'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -120,7 +120,7 @@
 def test_user_permissions_on_group_with_recursive_mode_inner_group():
     ## set permission to g0_3 group to none
     recursive = 'all'
-    group = u'g0/g0_3'
+    group = 'g0/g0_3'
     permissions_setup_func(group, 'group.none', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -137,7 +137,7 @@
 def test_user_permissions_on_group_with_recursive_mode_deepest():
     ## set permission to g0/g0_1/g0_1_1 group to write
     recursive = 'all'
-    group = u'g0/g0_1/g0_1_1'
+    group = 'g0/g0_1/g0_1_1'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -154,7 +154,7 @@
 def test_user_permissions_on_group_with_recursive_mode_only_with_repos():
     ## set permission to g0/g0_2 group to admin
     recursive = 'all'
-    group = u'g0/g0_2'
+    group = 'g0/g0_2'
     permissions_setup_func(group, 'group.admin', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -171,7 +171,7 @@
 def test_user_permissions_on_group_with_recursive_mode_on_repos():
     # set permission to g0/g0_1 with recursive mode on just repositories
     recursive = 'repos'
-    group = u'g0/g0_1'
+    group = 'g0/g0_1'
     perm = 'group.write'
     permissions_setup_func(group, perm, recursive=recursive)
 
@@ -195,7 +195,7 @@
 def test_user_permissions_on_group_with_recursive_mode_on_repo_groups():
     # set permission to g0/g0_1 with recursive mode on just repository groups
     recursive = 'groups'
-    group = u'g0/g0_1'
+    group = 'g0/g0_1'
     perm = 'group.none'
     permissions_setup_func(group, perm, recursive=recursive)
 
--- a/kallithea/tests/models/test_user_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_user_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -1,14 +1,14 @@
 from kallithea.model.db import User, UserGroup
 from kallithea.model.meta import Session
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestUserGroups(TestController):
+class TestUserGroups(base.TestController):
 
     def teardown_method(self, method):
         # delete all groups
@@ -16,14 +16,14 @@
             fixture.destroy_user_group(gr)
         Session().commit()
 
-    @parametrize('pre_existing,regular_should_be,external_should_be,groups,expected', [
+    @base.parametrize('pre_existing,regular_should_be,external_should_be,groups,expected', [
         ([], [], [], [], []),
-        ([], [u'regular'], [], [], [u'regular']),  # no changes of regular
-        ([u'some_other'], [], [], [u'some_other'], []),   # not added to regular group
-        ([], [u'regular'], [u'container'], [u'container'], [u'regular', u'container']),
-        ([], [u'regular'], [], [u'container', u'container2'], [u'regular', u'container', u'container2']),
-        ([], [u'regular'], [u'other'], [], [u'regular']),  # remove not used
-        ([u'some_other'], [u'regular'], [u'other', u'container'], [u'container', u'container2'], [u'regular', u'container', u'container2']),
+        ([], ['regular'], [], [], ['regular']),  # no changes of regular
+        (['some_other'], [], [], ['some_other'], []),   # not added to regular group
+        ([], ['regular'], ['container'], ['container'], ['regular', 'container']),
+        ([], ['regular'], [], ['container', 'container2'], ['regular', 'container', 'container2']),
+        ([], ['regular'], ['other'], [], ['regular']),  # remove not used
+        (['some_other'], ['regular'], ['other', 'container'], ['container', 'container2'], ['regular', 'container', 'container2']),
     ])
     def test_enforce_groups(self, pre_existing, regular_should_be,
                             external_should_be, groups, expected):
@@ -32,7 +32,7 @@
             fixture.destroy_user_group(gr)
         Session().commit()
 
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         for gr in pre_existing:
             gr = fixture.create_user_group(gr)
         Session().commit()
@@ -54,6 +54,6 @@
         UserGroupModel().enforce_groups(user, groups, 'container')
         Session().commit()
 
-        user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         in_groups = user.group_member
-        assert expected == [x.users_group.users_group_name for x in in_groups]
+        assert sorted(expected) == sorted(x.users_group.users_group_name for x in in_groups)
--- a/kallithea/tests/models/test_user_permissions_on_repo_groups.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_user_permissions_on_repo_groups.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,7 @@
 import functools
 
-from kallithea.model.db import RepoGroup, Repository, User
+import kallithea
+from kallithea.model.db import RepoGroup, Repository
 from kallithea.model.meta import Session
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests.models.common import _check_expected_count, _create_project_tree, _destroy_project_tree, _get_perms, check_tree_perms, expected_count
@@ -11,7 +12,7 @@
 _get_group_perms = None
 
 
-def permissions_setup_func(group_name=u'g0', perm='group.read', recursive='all',
+def permissions_setup_func(group_name='g0', perm='group.read', recursive='all',
                            user_id=None):
     """
     Resets all permissions to perm attribute
@@ -19,7 +20,7 @@
     if not user_id:
         user_id = test_u1_id
         permissions_setup_func(group_name, perm, recursive,
-                               user_id=User.get_default_user().user_id)
+                               user_id=kallithea.DEFAULT_USER_ID)
 
     repo_group = RepoGroup.get_by_group_name(group_name=group_name)
     if not repo_group:
@@ -56,7 +57,7 @@
 def test_user_permissions_on_group_without_recursive_mode():
     # set permission to g0 non-recursive mode
     recursive = 'none'
-    group = u'g0'
+    group = 'g0'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     items = [x for x in _get_repo_perms(group, recursive)]
@@ -75,7 +76,7 @@
 def test_user_permissions_on_group_without_recursive_mode_subgroup():
     # set permission to g0 non-recursive mode
     recursive = 'none'
-    group = u'g0/g0_1'
+    group = 'g0/g0_1'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     items = [x for x in _get_repo_perms(group, recursive)]
@@ -96,7 +97,7 @@
     # set permission to g0 recursive mode, all children including
     # other repos and groups should have this permission now set !
     recursive = 'all'
-    group = u'g0'
+    group = 'g0'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -115,8 +116,8 @@
     # set permission to g0 recursive mode, all children including
     # other repos and groups should have this permission now set !
     recursive = 'all'
-    group = u'g0'
-    default_user_id = User.get_default_user().user_id
+    group = 'g0'
+    default_user_id = kallithea.DEFAULT_USER_ID
     permissions_setup_func(group, 'group.write', recursive=recursive,
                            user_id=default_user_id)
 
@@ -142,7 +143,7 @@
 def test_user_permissions_on_group_with_recursive_mode_inner_group():
     ## set permission to g0_3 group to none
     recursive = 'all'
-    group = u'g0/g0_3'
+    group = 'g0/g0_3'
     permissions_setup_func(group, 'group.none', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -159,7 +160,7 @@
 def test_user_permissions_on_group_with_recursive_mode_deepest():
     ## set permission to g0_3 group to none
     recursive = 'all'
-    group = u'g0/g0_1/g0_1_1'
+    group = 'g0/g0_1/g0_1_1'
     permissions_setup_func(group, 'group.write', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -176,7 +177,7 @@
 def test_user_permissions_on_group_with_recursive_mode_only_with_repos():
     ## set permission to g0_3 group to none
     recursive = 'all'
-    group = u'g0/g0_2'
+    group = 'g0/g0_2'
     permissions_setup_func(group, 'group.admin', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
@@ -194,9 +195,9 @@
     # set permission to g0/g0_1 recursive repos only mode, all children including
     # other repos should have this permission now set, inner groups are excluded!
     recursive = 'repos'
-    group = u'g0/g0_1'
+    group = 'g0/g0_1'
     perm = 'group.none'
-    default_user_id = User.get_default_user().user_id
+    default_user_id = kallithea.DEFAULT_USER_ID
 
     permissions_setup_func(group, perm, recursive=recursive,
                            user_id=default_user_id)
@@ -227,7 +228,7 @@
 def test_user_permissions_on_group_with_recursive_repo_mode_inner_group():
     ## set permission to g0_3 group to none, with recursive repos only
     recursive = 'repos'
-    group = u'g0/g0_3'
+    group = 'g0/g0_3'
     perm = 'group.none'
     permissions_setup_func(group, perm, recursive=recursive)
 
@@ -253,8 +254,8 @@
     # other groups should have this permission now set. repositories should
     # remain intact as we use groups only mode !
     recursive = 'groups'
-    group = u'g0/g0_1'
-    default_user_id = User.get_default_user().user_id
+    group = 'g0/g0_1'
+    default_user_id = kallithea.DEFAULT_USER_ID
     permissions_setup_func(group, 'group.write', recursive=recursive,
                            user_id=default_user_id)
 
@@ -278,7 +279,7 @@
 def test_user_permissions_on_group_with_recursive_group_mode_inner_group():
     ## set permission to g0_3 group to none, with recursive mode for groups only
     recursive = 'groups'
-    group = u'g0/g0_3'
+    group = 'g0/g0_3'
     permissions_setup_func(group, 'group.none', recursive=recursive)
 
     repo_items = [x for x in _get_repo_perms(group, recursive)]
--- a/kallithea/tests/models/test_users.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/models/test_users.py	Sat May 02 21:20:43 2020 +0200
@@ -4,14 +4,14 @@
 from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
 fixture = Fixture()
 
 
-class TestUser(TestController):
+class TestUser(base.TestController):
 
     @classmethod
     def setup_class(cls):
@@ -21,21 +21,21 @@
         Session.remove()
 
     def test_create_and_remove(self):
-        usr = UserModel().create_or_update(username=u'test_user',
-                                           password=u'qweqwe',
-                                           email=u'u232@example.com',
-                                           firstname=u'u1', lastname=u'u1')
+        usr = UserModel().create_or_update(username='test_user',
+                                           password='qweqwe',
+                                           email='u232@example.com',
+                                           firstname='u1', lastname='u1')
         Session().commit()
-        assert User.get_by_username(u'test_user') == usr
-        assert User.get_by_username(u'test_USER', case_insensitive=True) == usr
+        assert User.get_by_username('test_user') == usr
+        assert User.get_by_username('test_USER', case_insensitive=True) == usr
         # User.get_by_username without explicit request for case insensitivty
         # will use database case sensitivity. The following will thus return
         # None on for example PostgreSQL but find test_user on MySQL - we are
         # fine with leaving that as undefined as long as it doesn't crash.
-        User.get_by_username(u'test_USER', case_insensitive=False)
+        User.get_by_username('test_USER', case_insensitive=False)
 
         # make user group
-        user_group = fixture.create_user_group(u'some_example_group')
+        user_group = fixture.create_user_group('some_example_group')
         Session().commit()
 
         UserGroupModel().add_user_to_group(user_group, usr)
@@ -49,15 +49,15 @@
         assert UserGroupMember.query().all() == []
 
     def test_additional_email_as_main(self):
-        usr = UserModel().create_or_update(username=u'test_user',
-                                           password=u'qweqwe',
-                                     email=u'main_email@example.com',
-                                     firstname=u'u1', lastname=u'u1')
+        usr = UserModel().create_or_update(username='test_user',
+                                           password='qweqwe',
+                                     email='main_email@example.com',
+                                     firstname='u1', lastname='u1')
         Session().commit()
 
         with pytest.raises(AttributeError):
             m = UserEmailMap()
-            m.email = u'main_email@example.com'
+            m.email = 'main_email@example.com'
             m.user = usr
             Session().add(m)
             Session().commit()
@@ -66,14 +66,14 @@
         Session().commit()
 
     def test_extra_email_map(self):
-        usr = UserModel().create_or_update(username=u'test_user',
-                                           password=u'qweqwe',
-                                     email=u'main_email@example.com',
-                                     firstname=u'u1', lastname=u'u1')
+        usr = UserModel().create_or_update(username='test_user',
+                                           password='qweqwe',
+                                     email='main_email@example.com',
+                                     firstname='u1', lastname='u1')
         Session().commit()
 
         m = UserEmailMap()
-        m.email = u'main_email2@example.com'
+        m.email = 'main_email2@example.com'
         m.user = usr
         Session().add(m)
         Session().commit()
@@ -101,13 +101,13 @@
         Session().commit()
 
 
-class TestUsers(TestController):
+class TestUsers(base.TestController):
 
     def setup_method(self, method):
-        self.u1 = UserModel().create_or_update(username=u'u1',
-                                        password=u'qweqwe',
-                                        email=u'u1@example.com',
-                                        firstname=u'u1', lastname=u'u1')
+        self.u1 = UserModel().create_or_update(username='u1',
+                                        password='qweqwe',
+                                        email='u1@example.com',
+                                        firstname='u1', lastname='u1')
 
     def teardown_method(self, method):
         perm = Permission.query().all()
--- a/kallithea/tests/other/test_auth_ldap.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/other/test_auth_ldap.py	Sat May 02 21:20:43 2020 +0200
@@ -22,8 +22,8 @@
         pass
 
     def authenticate_ldap(self, username, password):
-        return 'spam dn', dict(test_ldap_firstname=[u'spam ldap first name'],
-                               test_ldap_lastname=[u'spam ldap last name'],
+        return 'spam dn', dict(test_ldap_firstname=['spam ldap first name'],
+                               test_ldap_lastname=['spam ldap last name'],
                                test_ldap_email=['spam ldap email'])
 
 
@@ -39,8 +39,8 @@
     user_input = dict(username='test-user-{0}'.format(uniqifier),
                       password='spam password',
                       email='spam-email-{0}'.format(uniqifier),
-                      firstname=u'spam first name',
-                      lastname=u'spam last name',
+                      firstname='spam first name',
+                      lastname='spam last name',
                       active=True,
                       admin=False)
     user = create_test_user(user_input)
@@ -54,14 +54,14 @@
     # Verify that authenication succeeded and retrieved correct attributes
     # from LDAP.
     assert user_data is not None
-    assert user_data.get('firstname') == u'spam ldap first name'
-    assert user_data.get('lastname') == u'spam ldap last name'
+    assert user_data.get('firstname') == 'spam ldap first name'
+    assert user_data.get('lastname') == 'spam ldap last name'
     assert user_data.get('email') == 'spam ldap email'
 
     # Verify that authentication overwrote user attributes with the ones
     # retrieved from LDAP.
-    assert user.firstname == u'spam ldap first name'
-    assert user.lastname == u'spam ldap last name'
+    assert user.firstname == 'spam ldap first name'
+    assert user.lastname == 'spam ldap last name'
     assert user.email == 'spam ldap email'
 
 
@@ -83,16 +83,16 @@
     # Verify that authenication succeeded and retrieved correct attributes
     # from LDAP.
     assert user_data is not None
-    assert user_data.get('firstname') == u'spam ldap first name'
-    assert user_data.get('lastname') == u'spam ldap last name'
+    assert user_data.get('firstname') == 'spam ldap first name'
+    assert user_data.get('lastname') == 'spam ldap last name'
     assert user_data.get('email') == 'spam ldap email'
 
     # Verify that authentication created new user with attributes
     # retrieved from LDAP.
     new_user = User.get_by_username(username)
     assert new_user is not None
-    assert new_user.firstname == u'spam ldap first name'
-    assert new_user.lastname == u'spam ldap last name'
+    assert new_user.firstname == 'spam ldap first name'
+    assert new_user.lastname == 'spam ldap last name'
     assert new_user.email == 'spam ldap email'
 
 
@@ -126,14 +126,14 @@
     # Verify that authenication succeeded and retrieved correct attributes
     # from LDAP, with empty email.
     assert user_data is not None
-    assert user_data.get('firstname') == u'spam ldap first name'
-    assert user_data.get('lastname') == u'spam ldap last name'
+    assert user_data.get('firstname') == 'spam ldap first name'
+    assert user_data.get('lastname') == 'spam ldap last name'
     assert user_data.get('email') == ''
 
     # Verify that authentication created new user with attributes
     # retrieved from LDAP, with email == None.
     new_user = User.get_by_username(username)
     assert new_user is not None
-    assert new_user.firstname == u'spam ldap first name'
-    assert new_user.lastname == u'spam ldap last name'
+    assert new_user.firstname == 'spam ldap first name'
+    assert new_user.lastname == 'spam ldap last name'
     assert new_user.email is None
--- a/kallithea/tests/other/test_libs.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/other/test_libs.py	Sat May 02 21:20:43 2020 +0200
@@ -31,9 +31,9 @@
 import mock
 from tg.util.webtest import test_context
 
-from kallithea.lib.utils2 import AttributeDict
+from kallithea.lib.utils2 import AttributeDict, safe_bytes
 from kallithea.model.db import Repository
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
 proto = 'http'
@@ -91,19 +91,19 @@
         return self.current_url % kwargs
 
 
-class TestLibs(TestController):
+class TestLibs(base.TestController):
 
-    @parametrize('test_url,expected,expected_creds', TEST_URLS)
+    @base.parametrize('test_url,expected,expected_creds', TEST_URLS)
     def test_uri_filter(self, test_url, expected, expected_creds):
         from kallithea.lib.utils2 import uri_filter
         assert uri_filter(test_url) == expected
 
-    @parametrize('test_url,expected,expected_creds', TEST_URLS)
+    @base.parametrize('test_url,expected,expected_creds', TEST_URLS)
     def test_credentials_filter(self, test_url, expected, expected_creds):
         from kallithea.lib.utils2 import credentials_filter
         assert credentials_filter(test_url) == expected_creds
 
-    @parametrize('str_bool,expected', [
+    @base.parametrize('str_bool,expected', [
                            ('t', True),
                            ('true', True),
                            ('y', True),
@@ -141,21 +141,21 @@
             'marian.user', 'marco-polo', 'marco_polo', 'world'])
         assert expected == set(extract_mentioned_usernames(sample))
 
-    @parametrize('age_args,expected', [
-        (dict(), u'just now'),
-        (dict(seconds= -1), u'1 second ago'),
-        (dict(seconds= -60 * 2), u'2 minutes ago'),
-        (dict(hours= -1), u'1 hour ago'),
-        (dict(hours= -24), u'1 day ago'),
-        (dict(hours= -24 * 5), u'5 days ago'),
-        (dict(months= -1), u'1 month ago'),
-        (dict(months= -1, days= -2), u'1 month and 2 days ago'),
-        (dict(months= -1, days= -20), u'1 month and 19 days ago'),
-        (dict(years= -1, months= -1), u'1 year and 1 month ago'),
-        (dict(years= -1, months= -10), u'1 year and 10 months ago'),
-        (dict(years= -2, months= -4), u'2 years and 4 months ago'),
-        (dict(years= -2, months= -11), u'2 years and 11 months ago'),
-        (dict(years= -3, months= -2), u'3 years and 2 months ago'),
+    @base.parametrize('age_args,expected', [
+        (dict(), 'just now'),
+        (dict(seconds= -1), '1 second ago'),
+        (dict(seconds= -60 * 2), '2 minutes ago'),
+        (dict(hours= -1), '1 hour ago'),
+        (dict(hours= -24), '1 day ago'),
+        (dict(hours= -24 * 5), '5 days ago'),
+        (dict(months= -1), '1 month ago'),
+        (dict(months= -1, days= -2), '1 month and 2 days ago'),
+        (dict(months= -1, days= -20), '1 month and 19 days ago'),
+        (dict(years= -1, months= -1), '1 year and 1 month ago'),
+        (dict(years= -1, months= -10), '1 year and 10 months ago'),
+        (dict(years= -2, months= -4), '2 years and 4 months ago'),
+        (dict(years= -2, months= -11), '2 years and 11 months ago'),
+        (dict(years= -3, months= -2), '3 years and 2 months ago'),
     ])
     def test_age(self, age_args, expected):
         from kallithea.lib.utils2 import age
@@ -165,22 +165,22 @@
             delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
             assert age(n + delt(**age_args), now=n) == expected
 
-    @parametrize('age_args,expected', [
-        (dict(), u'just now'),
-        (dict(seconds= -1), u'1 second ago'),
-        (dict(seconds= -60 * 2), u'2 minutes ago'),
-        (dict(hours= -1), u'1 hour ago'),
-        (dict(hours= -24), u'1 day ago'),
-        (dict(hours= -24 * 5), u'5 days ago'),
-        (dict(months= -1), u'1 month ago'),
-        (dict(months= -1, days= -2), u'1 month ago'),
-        (dict(months= -1, days= -20), u'1 month ago'),
-        (dict(years= -1, months= -1), u'13 months ago'),
-        (dict(years= -1, months= -10), u'22 months ago'),
-        (dict(years= -2, months= -4), u'2 years ago'),
-        (dict(years= -2, months= -11), u'3 years ago'),
-        (dict(years= -3, months= -2), u'3 years ago'),
-        (dict(years= -4, months= -8), u'5 years ago'),
+    @base.parametrize('age_args,expected', [
+        (dict(), 'just now'),
+        (dict(seconds= -1), '1 second ago'),
+        (dict(seconds= -60 * 2), '2 minutes ago'),
+        (dict(hours= -1), '1 hour ago'),
+        (dict(hours= -24), '1 day ago'),
+        (dict(hours= -24 * 5), '5 days ago'),
+        (dict(months= -1), '1 month ago'),
+        (dict(months= -1, days= -2), '1 month ago'),
+        (dict(months= -1, days= -20), '1 month ago'),
+        (dict(years= -1, months= -1), '13 months ago'),
+        (dict(years= -1, months= -10), '22 months ago'),
+        (dict(years= -2, months= -4), '2 years ago'),
+        (dict(years= -2, months= -11), '3 years ago'),
+        (dict(years= -3, months= -2), '3 years ago'),
+        (dict(years= -4, months= -8), '5 years ago'),
     ])
     def test_age_short(self, age_args, expected):
         from kallithea.lib.utils2 import age
@@ -190,16 +190,16 @@
             delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
             assert age(n + delt(**age_args), show_short_version=True, now=n) == expected
 
-    @parametrize('age_args,expected', [
-        (dict(), u'just now'),
-        (dict(seconds=1), u'in 1 second'),
-        (dict(seconds=60 * 2), u'in 2 minutes'),
-        (dict(hours=1), u'in 1 hour'),
-        (dict(hours=24), u'in 1 day'),
-        (dict(hours=24 * 5), u'in 5 days'),
-        (dict(months=1), u'in 1 month'),
-        (dict(months=1, days=1), u'in 1 month and 1 day'),
-        (dict(years=1, months=1), u'in 1 year and 1 month')
+    @base.parametrize('age_args,expected', [
+        (dict(), 'just now'),
+        (dict(seconds=1), 'in 1 second'),
+        (dict(seconds=60 * 2), 'in 2 minutes'),
+        (dict(hours=1), 'in 1 hour'),
+        (dict(hours=24), 'in 1 day'),
+        (dict(hours=24 * 5), 'in 5 days'),
+        (dict(months=1), 'in 1 month'),
+        (dict(months=1, days=1), 'in 1 month and 1 day'),
+        (dict(years=1, months=1), 'in 1 year and 1 month')
     ])
     def test_age_in_future(self, age_args, expected):
         from kallithea.lib.utils2 import age
@@ -227,7 +227,7 @@
 
     def test_alternative_gravatar(self):
         from kallithea.lib.helpers import gravatar_url
-        _md5 = lambda s: hashlib.md5(s).hexdigest()
+        _md5 = lambda s: hashlib.md5(safe_bytes(s)).hexdigest()
 
         # mock tg.tmpl_context
         def fake_tmpl_context(_url):
@@ -270,7 +270,7 @@
                 grav = gravatar_url(email_address=em, size=24)
                 assert grav == 'https://example.com/%s/%s' % (_md5(em), 24)
 
-    @parametrize('clone_uri_tmpl,repo_name,username,prefix,expected', [
+    @base.parametrize('clone_uri_tmpl,repo_name,username,prefix,expected', [
         (Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '', 'http://vps1:8000/group/repo1'),
         (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '', 'http://username@vps1:8000/group/repo1'),
         (Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '/prefix', 'http://vps1:8000/prefix/group/repo1'),
@@ -307,7 +307,7 @@
             return tmpl % (url_ or '/repo_name/changeset/%s' % _url, _url)
         return url_pattern.sub(url_func, text)
 
-    @parametrize('sample,expected', [
+    @base.parametrize('sample,expected', [
       ("",
        ""),
       ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
@@ -341,7 +341,7 @@
             from kallithea.lib.helpers import urlify_text
             assert urlify_text(sample, 'repo_name') == expected
 
-    @parametrize('sample,expected,url_', [
+    @base.parametrize('sample,expected,url_', [
       ("",
        "",
        ""),
@@ -396,7 +396,7 @@
             from kallithea.lib.helpers import urlify_text
             assert urlify_text(sample, 'repo_name', stylize=True) == expected
 
-    @parametrize('sample,expected', [
+    @base.parametrize('sample,expected', [
       ("deadbeefcafe @mention, and http://foo.bar/ yo",
        """<a class="changeset_hash" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a>"""
        """<a class="message-link" href="#the-link"> <b>@mention</b>, and </a>"""
@@ -409,7 +409,7 @@
             from kallithea.lib.helpers import urlify_text
             assert urlify_text(sample, 'repo_name', link_='#the-link') == expected
 
-    @parametrize('issue_pat,issue_server,issue_sub,sample,expected', [
+    @base.parametrize('issue_pat,issue_server,issue_sub,sample,expected', [
         (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1',
             'issue #123 and issue#456',
             """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """
@@ -482,7 +482,7 @@
             """empty issue_sub <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">$123</a> and """
             """issue$456"""),
         # named groups
-        (r'(PR|pullrequest|pull request) ?(?P<sitecode>BRU|CPH|BER)-(?P<id>\d+)', 'http://foo/\g<sitecode>/pullrequest/\g<id>/', 'PR-\g<sitecode>-\g<id>',
+        (r'(PR|pullrequest|pull request) ?(?P<sitecode>BRU|CPH|BER)-(?P<id>\d+)', r'http://foo/\g<sitecode>/pullrequest/\g<id>/', r'PR-\g<sitecode>-\g<id>',
             'pullrequest CPH-789 is similar to PRBRU-747',
             """<a class="issue-tracker-link" href="http://foo/CPH/pullrequest/789/">PR-CPH-789</a> is similar to """
             """<a class="issue-tracker-link" href="http://foo/BRU/pullrequest/747/">PR-BRU-747</a>"""),
@@ -500,7 +500,7 @@
             with mock.patch('kallithea.CONFIG', config_stub):
                 assert urlify_text(sample, 'repo_name') == expected
 
-    @parametrize('sample,expected', [
+    @base.parametrize('sample,expected', [
         ('abc X5', 'abc <a class="issue-tracker-link" href="http://main/repo_name/main/5/">#5</a>'),
         ('abc pullrequest #6 xyz', 'abc <a class="issue-tracker-link" href="http://pr/repo_name/pr/6">PR#6</a> xyz'),
         ('pull request7 #', '<a class="issue-tracker-link" href="http://pr/repo_name/pr/7">PR#7</a> #'),
@@ -512,28 +512,28 @@
     def test_urlify_issues_multiple_issue_patterns(self, sample, expected):
         from kallithea.lib.helpers import urlify_text
         config_stub = {
-            'sqlalchemy.url': 'foo',
-            'issue_pat': 'X(\d+)',
-            'issue_server_link': 'http://main/{repo}/main/\\1/',
-            'issue_sub': '#\\1',
-            'issue_pat_pr': '(?:pullrequest|pull request|PR|pr) ?#?(\d+)',
-            'issue_server_link_pr': 'http://pr/{repo}/pr/\\1',
-            'issue_sub_pr': 'PR#\\1',
-            'issue_pat_bug': '(?:BUG|bug|issue) ?#?(\d+)',
-            'issue_server_link_bug': 'http://bug/{repo}/bug/\\1',
-            'issue_sub_bug': 'bug#\\1',
-            'issue_pat_empty_prefix': 'FAIL(\d+)',
-            'issue_server_link_empty_prefix': 'http://fail/{repo}/\\1',
-            'issue_sub_empty_prefix': '',
-            'issue_pat_absent_prefix': 'FAILMORE(\d+)',
-            'issue_server_link_absent_prefix': 'http://failmore/{repo}/\\1',
+            'sqlalchemy.url': r'foo',
+            'issue_pat': r'X(\d+)',
+            'issue_server_link': r'http://main/{repo}/main/\1/',
+            'issue_sub': r'#\1',
+            'issue_pat_pr': r'(?:pullrequest|pull request|PR|pr) ?#?(\d+)',
+            'issue_server_link_pr': r'http://pr/{repo}/pr/\1',
+            'issue_sub_pr': r'PR#\1',
+            'issue_pat_bug': r'(?:BUG|bug|issue) ?#?(\d+)',
+            'issue_server_link_bug': r'http://bug/{repo}/bug/\1',
+            'issue_sub_bug': r'bug#\1',
+            'issue_pat_empty_prefix': r'FAIL(\d+)',
+            'issue_server_link_empty_prefix': r'http://fail/{repo}/\1',
+            'issue_sub_empty_prefix': r'',
+            'issue_pat_absent_prefix': r'FAILMORE(\d+)',
+            'issue_server_link_absent_prefix': r'http://failmore/{repo}/\1',
         }
         # force recreation of lazy function
         with mock.patch('kallithea.lib.helpers._urlify_issues_f', None):
             with mock.patch('kallithea.CONFIG', config_stub):
                 assert urlify_text(sample, 'repo_name') == expected
 
-    @parametrize('test,expected', [
+    @base.parametrize('test,expected', [
       ("", None),
       ("/_2", None),
       ("_2", 2),
@@ -542,9 +542,9 @@
     def test_get_permanent_id(self, test, expected):
         from kallithea.lib.utils import _get_permanent_id
         extracted = _get_permanent_id(test)
-        assert extracted == expected, 'url:%s, got:`%s` expected: `%s`' % (test, _test, expected)
+        assert extracted == expected, 'url:%s, got:`%s` expected: `%s`' % (test, base._test, expected)
 
-    @parametrize('test,expected', [
+    @base.parametrize('test,expected', [
       ("", ""),
       ("/", "/"),
       ("/_ID", '/_ID'),
@@ -555,14 +555,14 @@
       ("_IDa", '_IDa'),
     ])
     def test_fix_repo_id_name(self, test, expected):
-        repo = Repository.get_by_repo_name(HG_REPO)
+        repo = Repository.get_by_repo_name(base.HG_REPO)
         test = test.replace('ID', str(repo.repo_id))
         expected = expected.replace('NAME', repo.repo_name).replace('ID', str(repo.repo_id))
         from kallithea.lib.utils import fix_repo_id_name
         replaced = fix_repo_id_name(test)
         assert replaced == expected, 'url:%s, got:`%s` expected: `%s`' % (test, replaced, expected)
 
-    @parametrize('canonical,test,expected', [
+    @base.parametrize('canonical,test,expected', [
         ('http://www.example.org/', '/abc/xyz', 'http://www.example.org/abc/xyz'),
         ('http://www.example.org', '/abc/xyz', 'http://www.example.org/abc/xyz'),
         ('http://www.example.org', '/abc/xyz/', 'http://www.example.org/abc/xyz/'),
@@ -590,7 +590,7 @@
             with mock.patch('kallithea.CONFIG', config_mock):
                 assert canonical_url(test) == expected
 
-    @parametrize('canonical,expected', [
+    @base.parametrize('canonical,expected', [
         ('http://www.example.org', 'www.example.org'),
         ('http://www.example.org/repos/', 'www.example.org'),
         ('http://www.example.org/kallithea/repos/', 'www.example.org'),
--- a/kallithea/tests/other/test_mail.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/other/test_mail.py	Sat May 02 21:20:43 2020 +0200
@@ -1,8 +1,10 @@
+# -*- coding: utf-8 -*-
+
 import mock
 
 import kallithea
 from kallithea.model.db import User
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
 class smtplib_mock(object):
@@ -25,7 +27,7 @@
 
 
 @mock.patch('kallithea.lib.rcmail.smtp_mailer.smtplib', smtplib_mock)
-class TestMail(TestController):
+class TestMail(base.TestController):
 
     def test_send_mail_trivial(self):
         mailserver = 'smtp.mailserver.org'
@@ -66,7 +68,7 @@
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
             kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
 
-        assert smtplib_mock.lastdest == set([TEST_USER_ADMIN_EMAIL, email_to])
+        assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL, email_to])
         assert smtplib_mock.lastsender == envelope_from
         assert 'From: %s' % envelope_from in smtplib_mock.lastmsg
         assert 'Subject: %s' % subject in smtplib_mock.lastmsg
@@ -90,7 +92,7 @@
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
             kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
 
-        assert smtplib_mock.lastdest == set([TEST_USER_ADMIN_EMAIL] + email_to.split(','))
+        assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL] + email_to.split(','))
         assert smtplib_mock.lastsender == envelope_from
         assert 'From: %s' % envelope_from in smtplib_mock.lastmsg
         assert 'Subject: %s' % subject in smtplib_mock.lastmsg
@@ -112,7 +114,7 @@
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
             kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
 
-        assert smtplib_mock.lastdest == set([TEST_USER_ADMIN_EMAIL])
+        assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL])
         assert smtplib_mock.lastsender == envelope_from
         assert 'From: %s' % envelope_from in smtplib_mock.lastmsg
         assert 'Subject: %s' % subject in smtplib_mock.lastmsg
@@ -126,14 +128,14 @@
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        author = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         config_mock = {
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, author=author)
+            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
@@ -144,20 +146,20 @@
 
     def test_send_mail_with_author_full_mail_from(self):
         mailserver = 'smtp.mailserver.org'
-        recipients = ['rcpt1', 'rcpt2']
+        recipients = ['ræcpt1', 'receptor2 <rcpt2@example.com>', 'tæst@example.com', 'Tæst <test@example.com>']
         envelope_addr = 'noreply@mailserver.org'
-        envelope_from = 'Some Name <%s>' % envelope_addr
+        envelope_from = 'Söme Næme <%s>' % envelope_addr
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User.get_by_username(TEST_USER_REGULAR_LOGIN)
+        author = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         config_mock = {
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, author=author)
+            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
@@ -173,7 +175,7 @@
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User(name='foo', lastname=u'(fubar) "baz"')
+        author = User(name='foo', lastname='(fubar) "baz"')
         headers = {'extra': 'yes'}
 
         config_mock = {
@@ -182,7 +184,7 @@
         }
         with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
             kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body,
-                                                     author=author, headers=headers)
+                                                     from_name=author.full_name_or_username, headers=headers)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
--- a/kallithea/tests/other/test_validators.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/other/test_validators.py	Sat May 02 21:20:43 2020 +0200
@@ -6,7 +6,7 @@
 from kallithea.model.meta import Session
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -14,7 +14,7 @@
 
 
 @pytest.mark.usefixtures("test_context_fixture") # apply fixture for all test methods
-class TestRepoGroups(TestController):
+class TestRepoGroups(base.TestController):
 
     def teardown_method(self, method):
         Session.remove()
@@ -40,7 +40,7 @@
         with pytest.raises(formencode.Invalid):
             validator.to_python('.,')
         with pytest.raises(formencode.Invalid):
-            validator.to_python(TEST_USER_ADMIN_LOGIN)
+            validator.to_python(base.TEST_USER_ADMIN_LOGIN)
         assert 'test' == validator.to_python('test')
 
         validator = v.ValidUsername(edit=True, old_data={'user_id': 1})
@@ -49,31 +49,31 @@
         validator = v.ValidRepoUser()
         with pytest.raises(formencode.Invalid):
             validator.to_python('nouser')
-        assert TEST_USER_ADMIN_LOGIN == validator.to_python(TEST_USER_ADMIN_LOGIN)
+        assert base.TEST_USER_ADMIN_LOGIN == validator.to_python(base.TEST_USER_ADMIN_LOGIN)
 
     def test_ValidUserGroup(self):
         validator = v.ValidUserGroup()
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'default')
+            validator.to_python('default')
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'.,')
+            validator.to_python('.,')
 
-        gr = fixture.create_user_group(u'test')
-        gr2 = fixture.create_user_group(u'tes2')
+        gr = fixture.create_user_group('test')
+        gr2 = fixture.create_user_group('tes2')
         Session().commit()
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'test')
+            validator.to_python('test')
         assert gr.users_group_id is not None
         validator = v.ValidUserGroup(edit=True,
                                     old_data={'users_group_id':
                                               gr2.users_group_id})
 
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'test')
+            validator.to_python('test')
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'TesT')
+            validator.to_python('TesT')
         with pytest.raises(formencode.Invalid):
-            validator.to_python(u'TEST')
+            validator.to_python('TEST')
         UserGroupModel().delete(gr)
         UserGroupModel().delete(gr2)
         Session().commit()
@@ -82,11 +82,11 @@
         validator = v.ValidRepoGroup()
         model = RepoGroupModel()
         with pytest.raises(formencode.Invalid):
-            validator.to_python({'group_name': HG_REPO, })
-        gr = model.create(group_name=u'test_gr', group_description=u'desc',
+            validator.to_python({'group_name': base.HG_REPO, })
+        gr = model.create(group_name='test_gr', group_description='desc',
                           parent=None,
                           just_db=True,
-                          owner=TEST_USER_ADMIN_LOGIN)
+                          owner=base.TEST_USER_ADMIN_LOGIN)
         with pytest.raises(formencode.Invalid):
             validator.to_python({'group_name': gr.group_name, })
 
@@ -127,8 +127,8 @@
     def test_ValidAuth(self):
         validator = v.ValidAuth()
         valid_creds = {
-            'username': TEST_USER_REGULAR2_LOGIN,
-            'password': TEST_USER_REGULAR2_PASS,
+            'username': base.TEST_USER_REGULAR2_LOGIN,
+            'password': base.TEST_USER_REGULAR2_PASS,
         }
         invalid_creds = {
             'username': 'err',
@@ -145,12 +145,12 @@
             validator.to_python({'repo_name': ''})
 
         with pytest.raises(formencode.Invalid):
-            validator.to_python({'repo_name': HG_REPO})
+            validator.to_python({'repo_name': base.HG_REPO})
 
-        gr = RepoGroupModel().create(group_name=u'group_test',
-                                      group_description=u'desc',
+        gr = RepoGroupModel().create(group_name='group_test',
+                                      group_description='desc',
                                       parent=None,
-                                      owner=TEST_USER_ADMIN_LOGIN)
+                                      owner=base.TEST_USER_ADMIN_LOGIN)
         with pytest.raises(formencode.Invalid):
             validator.to_python({'repo_name': gr.group_name})
 
@@ -163,7 +163,7 @@
         # this uses ValidRepoName validator
         assert True
 
-    @parametrize('name,expected', [
+    @base.parametrize('name,expected', [
         ('test', 'test'), ('lolz!', 'lolz'), ('  aavv', 'aavv'),
         ('ala ma kota', 'ala-ma-kota'), ('@nooo', 'nooo'),
         ('$!haha lolz !', 'haha-lolz'), ('$$$$$', ''), ('{}OK!', 'OK'),
@@ -196,7 +196,7 @@
 
     def test_ValidPath(self):
             validator = v.ValidPath()
-            assert TESTS_TMP_PATH == validator.to_python(TESTS_TMP_PATH)
+            assert base.TESTS_TMP_PATH == validator.to_python(base.TESTS_TMP_PATH)
             with pytest.raises(formencode.Invalid):
                 validator.to_python('/no_such_dir')
 
@@ -205,20 +205,20 @@
 
         assert 'mail@python.org' == validator.to_python('MaiL@Python.org')
 
-        email = TEST_USER_REGULAR2_EMAIL
+        email = base.TEST_USER_REGULAR2_EMAIL
         with pytest.raises(formencode.Invalid):
             validator.to_python(email)
 
     def test_ValidSystemEmail(self):
         validator = v.ValidSystemEmail()
-        email = TEST_USER_REGULAR2_EMAIL
+        email = base.TEST_USER_REGULAR2_EMAIL
 
         assert email == validator.to_python(email)
         with pytest.raises(formencode.Invalid):
             validator.to_python('err')
 
     def test_LdapLibValidator(self):
-        if ldap_lib_installed:
+        if base.ldap_lib_installed:
             validator = v.LdapLibValidator()
             assert "DN" == validator.to_python('DN')
         else:
--- a/kallithea/tests/other/test_vcs_operations.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/other/test_vcs_operations.py	Sat May 02 21:20:43 2020 +0200
@@ -25,25 +25,24 @@
 
 """
 
-from __future__ import print_function
-
 import json
 import os
 import re
 import tempfile
 import time
-import urllib2
+import urllib.request
 from subprocess import PIPE, Popen
 from tempfile import _RandomNameSequence
 
 import pytest
 
 from kallithea import CONFIG
-from kallithea.model.db import CacheInvalidation, Repository, Ui, User, UserIpMap, UserLog
+from kallithea.lib.utils2 import ascii_bytes, safe_str
+from kallithea.model.db import Repository, Ui, User, UserIpMap, UserLog
 from kallithea.model.meta import Session
 from kallithea.model.ssh_key import SshKeyModel
 from kallithea.model.user import UserModel
-from kallithea.tests.base import *
+from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
 
@@ -64,18 +63,18 @@
 
 class SshVcsTest(object):
     public_keys = {
-        TEST_USER_REGULAR_LOGIN: u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
-        TEST_USER_ADMIN_LOGIN: u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
+        base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
+        base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
     }
 
     @classmethod
-    def repo_url_param(cls, webserver, repo_name, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS, client_ip=IP_ADDR):
+    def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR):
         user = User.get_by_username(username)
         if user.ssh_keys:
             ssh_key = user.ssh_keys[0]
         else:
             sshkeymodel = SshKeyModel()
-            ssh_key = sshkeymodel.create(user, u'test key', cls.public_keys[user.username])
+            ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username])
             Session().commit()
 
         return cls._ssh_param(repo_name, user, ssh_key, client_ip)
@@ -83,11 +82,11 @@
 # Mixins for using Mercurial and Git
 class HgVcsTest(object):
     repo_type = 'hg'
-    repo_name = HG_REPO
+    repo_name = base.HG_REPO
 
 class GitVcsTest(object):
     repo_type = 'git'
-    repo_name = GIT_REPO
+    repo_name = base.GIT_REPO
 
 # Combine mixins to give the combinations we want to parameterize tests with
 class HgHttpVcsTest(HgVcsTest, HttpVcsTest):
@@ -118,17 +117,17 @@
             ssh_key.user_ssh_key_id)
         return "ssh://someuser@somehost/%s""" % repo_name
 
-parametrize_vcs_test = parametrize('vt', [
+parametrize_vcs_test = base.parametrize('vt', [
     HgHttpVcsTest,
     GitHttpVcsTest,
     HgSshVcsTest,
     GitSshVcsTest,
 ])
-parametrize_vcs_test_hg = parametrize('vt', [
+parametrize_vcs_test_hg = base.parametrize('vt', [
     HgHttpVcsTest,
     HgSshVcsTest,
 ])
-parametrize_vcs_test_http = parametrize('vt', [
+parametrize_vcs_test_http = base.parametrize('vt', [
     HgHttpVcsTest,
     GitHttpVcsTest,
 ])
@@ -162,11 +161,11 @@
                 print('stderr:', stderr)
         if not ignoreReturnCode:
             assert p.returncode == 0
-        return stdout, stderr
+        return safe_str(stdout), safe_str(stderr)
 
 
 def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
-    return tempfile.mkdtemp(dir=TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
+    return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
 
 
 def _add_files(vcs, dest_dir, files_no=3):
@@ -177,7 +176,7 @@
     :param vcs:
     :param dest_dir:
     """
-    added_file = '%ssetup.py' % _RandomNameSequence().next()
+    added_file = '%ssetup.py' % next(_RandomNameSequence())
     open(os.path.join(dest_dir, added_file), 'a').close()
     Command(dest_dir).execute(vcs, 'add', added_file)
 
@@ -186,7 +185,7 @@
         author_str = 'User <%s>' % email
     else:
         author_str = 'User ǝɯɐᴎ <%s>' % email
-    for i in xrange(files_no):
+    for i in range(files_no):
         cmd = """echo "added_line%s" >> %s""" % (i, added_file)
         Command(dest_dir).execute(cmd)
         if vcs == 'hg':
@@ -242,7 +241,7 @@
 
 
 @pytest.mark.usefixtures("test_context_fixture")
-class TestVCSOperations(TestController):
+class TestVCSOperations(base.TestController):
 
     @classmethod
     def setup_class(cls):
@@ -262,16 +261,16 @@
     @pytest.fixture(scope="module")
     def testfork(self):
         # create fork so the repo stays untouched
-        git_fork_name = u'%s_fork%s' % (GIT_REPO, _RandomNameSequence().next())
-        fixture.create_fork(GIT_REPO, git_fork_name)
-        hg_fork_name = u'%s_fork%s' % (HG_REPO, _RandomNameSequence().next())
-        fixture.create_fork(HG_REPO, hg_fork_name)
+        git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
+        fixture.create_fork(base.GIT_REPO, git_fork_name)
+        hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
+        fixture.create_fork(base.HG_REPO, hg_fork_name)
         return {'git': git_fork_name, 'hg': hg_fork_name}
 
     @parametrize_vcs_test
     def test_clone_repo_by_admin(self, webserver, vt):
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
 
         if vt.repo_type == 'git':
             assert 'Cloning into' in stdout + stderr
@@ -286,26 +285,26 @@
     @parametrize_vcs_test_http
     def test_clone_wrong_credentials(self, webserver, vt):
         clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
         if vt.repo_type == 'git':
             assert 'fatal: Authentication failed' in stderr
         elif vt.repo_type == 'hg':
             assert 'abort: authorization failed' in stderr
 
     def test_clone_git_dir_as_hg(self, webserver):
-        clone_url = HgHttpVcsTest.repo_url_param(webserver, GIT_REPO)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
+        clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
         assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
 
     def test_clone_hg_repo_as_git(self, webserver):
-        clone_url = GitHttpVcsTest.repo_url_param(webserver, HG_REPO)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
+        clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
         assert 'not found' in stderr
 
     @parametrize_vcs_test
     def test_clone_non_existing_path(self, webserver, vt):
         clone_url = vt.repo_url_param(webserver, 'trololo')
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
         if vt.repo_type == 'git':
             assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
         elif vt.repo_type == 'hg':
@@ -318,30 +317,30 @@
         Session().commit()
 
         # Create an empty server repo using the API
-        repo_name = u'new_%s_%s' % (vt.repo_type, _RandomNameSequence().next())
-        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
+        repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
+        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         params = {
             "id": 7,
             "api_key": usr.api_key,
             "method": 'create_repo',
             "args": dict(repo_name=repo_name,
-                         owner=TEST_USER_ADMIN_LOGIN,
+                         owner=base.TEST_USER_ADMIN_LOGIN,
                          repo_type=vt.repo_type),
         }
-        req = urllib2.Request(
+        req = urllib.request.Request(
             'http://%s:%s/_admin/api' % webserver.server_address,
-            data=json.dumps(params),
+            data=ascii_bytes(json.dumps(params)),
             headers={'content-type': 'application/json'})
-        response = urllib2.urlopen(req)
+        response = urllib.request.urlopen(req)
         result = json.loads(response.read())
         # Expect something like:
         # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
-        assert result[u'result'][u'success']
+        assert result['result']['success']
 
         # Create local clone of the empty server repo
         local_clone_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
 
         # Make 3 commits and push to the empty server repo.
         # The server repo doesn't have any other heads than the
@@ -361,15 +360,15 @@
         # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
         action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
-            (u'started_following_repo', 0),
-            (u'user_created_repo', 0),
-            (u'pull', 0),
-            (u'push', 3)]
+            ('started_following_repo', 0),
+            ('user_created_repo', 0),
+            ('pull', 0),
+            ('push', 3)]
             if vt.repo_type == 'git' else [
-            (u'started_following_repo', 0),
-            (u'user_created_repo', 0),
+            ('started_following_repo', 0),
+            ('user_created_repo', 0),
             # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
-            (u'push', 3)])
+            ('push', 3)])
 
     @parametrize_vcs_test
     def test_push_new_file(self, webserver, testfork, vt):
@@ -378,7 +377,7 @@
 
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)
@@ -392,7 +391,7 @@
 
         action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
-            [(u'pull', 0), (u'push', 3)]
+            [('pull', 0), ('push', 3)]
 
     @parametrize_vcs_test
     def test_pull(self, webserver, testfork, vt):
@@ -400,7 +399,7 @@
         Session().commit()
 
         dest_dir = _get_tmp_dir()
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
 
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
         stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
@@ -411,7 +410,7 @@
             assert 'new changesets' in stdout
 
         action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
-        assert action_parts == [u'pull']
+        assert action_parts == ['pull']
 
         # Test handling of URLs with extra '/' around repo_name
         stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True)
@@ -443,36 +442,25 @@
     def test_push_invalidates_cache(self, webserver, testfork, vt):
         pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
 
-        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
-                                               == testfork[vt.repo_type]).scalar()
-        if not key:
-            key = CacheInvalidation(testfork[vt.repo_type], testfork[vt.repo_type])
-            Session().add(key)
-
-        key.cache_active = True
-        Session().commit()
-
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
 
+        Session().commit()  # expire test session to make sure SA fetch new Repository instances after last_changeset has been updated server side hook in other process
+
         if vt.repo_type == 'git':
             _check_proper_git_push(stdout, stderr)
 
         post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
         assert pre_cached_tip != post_cached_tip
 
-        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
-                                               == testfork[vt.repo_type]).all()
-        assert key == []
-
     @parametrize_vcs_test_http
     def test_push_wrong_credentials(self, webserver, vt):
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
@@ -489,8 +477,8 @@
         Session().commit()
 
         dest_dir = _get_tmp_dir()
-        clone_url = vt.repo_url_param(webserver, vt.repo_name, username=TEST_USER_REGULAR_LOGIN, password=TEST_USER_REGULAR_PASS)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url)
 
@@ -501,13 +489,13 @@
 
         action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
-            [(u'pull', 0)]
+            [('pull', 0)]
 
     @parametrize_vcs_test
     def test_push_back_to_wrong_url(self, webserver, vt):
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         stdout, stderr = _add_files_and_push(
             webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % (
@@ -524,12 +512,12 @@
         user_model = UserModel()
         try:
             # Add IP constraint that excludes the test context:
-            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
+            user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
             Session().commit()
             # IP permissions are cached, need to wait for the cache in the server process to expire
             time.sleep(1.5)
             clone_url = vt.repo_url_param(webserver, vt.repo_name)
-            stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
+            stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
             if vt.repo_type == 'git':
                 # The message apparently changed in Git 1.8.3, so match it loosely.
                 assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
@@ -544,7 +532,7 @@
             time.sleep(1.5)
 
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
 
         if vt.repo_type == 'git':
             assert 'Cloning into' in stdout + stderr
@@ -563,9 +551,9 @@
         Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
         Session().commit()
         # clone repo
-        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS)
+        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
         dest_dir = _get_tmp_dir()
-        stdout, stderr = Command(TESTS_TMP_PATH) \
+        stdout, stderr = Command(base.TESTS_TMP_PATH) \
             .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
         if vt.repo_type == 'hg':
             assert 'preoutgoing.testhook hook failed' in stdout
@@ -578,9 +566,9 @@
         Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
         Session().commit()
         # clone repo
-        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS)
+        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
         dest_dir = _get_tmp_dir()
-        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url,
                                              ignoreReturnCode=True)
@@ -621,19 +609,19 @@
 
     def test_add_submodule_git(self, webserver, testfork):
         dest_dir = _get_tmp_dir()
-        clone_url = GitHttpVcsTest.repo_url_param(webserver, GIT_REPO)
+        clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
 
         fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git'])
 
         # add submodule
-        stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir)
+        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir)
         stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule')
-        stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=TEST_USER_ADMIN_EMAIL)
+        stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL)
         stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master')
 
         # check for testsubmodule link in files page
         self.log_user()
-        response = self.app.get(url(controller='files', action='index',
+        response = self.app.get(base.url(controller='files', action='index',
                                     repo_name=testfork['git'],
                                     revision='tip',
                                     f_path='/'))
@@ -643,7 +631,7 @@
         response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url)
 
         # check that following a submodule link actually works - and redirects
-        response = self.app.get(url(controller='files', action='index',
+        response = self.app.get(base.url(controller='files', action='index',
                                     repo_name=testfork['git'],
                                     revision='tip',
                                     f_path='/testsubmodule'),
--- a/kallithea/tests/performance/test_vcs.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/performance/test_vcs.py	Sat May 02 21:20:43 2020 +0200
@@ -15,11 +15,11 @@
 import pytest
 
 from kallithea.model.db import Repository
-from kallithea.tests.base import *
+from kallithea.tests import base
 
 
-@pytest.mark.skipif("not os.environ.has_key('TEST_PERFORMANCE')", reason="skipping performance tests, set TEST_PERFORMANCE in environment if desired")
-class TestVCSPerformance(TestController):
+@pytest.mark.skipif("'TEST_PERFORMANCE' not in os.environ", reason="skipping performance tests, set TEST_PERFORMANCE in environment if desired")
+class TestVCSPerformance(base.TestController):
 
     def graphmod(self, repo):
         """ Simple test for running the graph_data function for profiling/testing performance. """
@@ -31,7 +31,7 @@
         jsdata = graph_data(scm_inst, revs)
 
     def test_graphmod_hg(self, benchmark):
-        benchmark(self.graphmod, HG_REPO)
+        benchmark(self.graphmod, base.HG_REPO)
 
     def test_graphmod_git(self, benchmark):
-        benchmark(self.graphmod, GIT_REPO)
+        benchmark(self.graphmod, base.GIT_REPO)
--- a/kallithea/tests/scripts/manual_test_concurrency.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/scripts/manual_test_concurrency.py	Sat May 02 21:20:43 2020 +0200
@@ -26,8 +26,6 @@
 
 """
 
-from __future__ import print_function
-
 import logging
 import os
 import shutil
@@ -41,7 +39,6 @@
 
 from kallithea.config.environment import load_environment
 from kallithea.lib.auth import get_crypt_password
-from kallithea.lib.utils import setup_cache_regions
 from kallithea.model import meta
 from kallithea.model.base import init_model
 from kallithea.model.db import Repository, Ui, User
@@ -52,8 +49,6 @@
 conf = appconfig('config:development.ini', relative_to=rel_path)
 load_environment(conf.global_conf, conf.local_conf)
 
-setup_cache_regions(conf)
-
 USER = TEST_USER_ADMIN_LOGIN
 PASS = TEST_USER_ADMIN_PASS
 HOST = 'server.local'
@@ -205,7 +200,7 @@
             backend = 'hg'
 
         if METHOD == 'pull':
-            seq = tempfile._RandomNameSequence().next()
+            seq = next(tempfile._RandomNameSequence())
             test_clone_with_credentials(repo=sys.argv[1], method='clone',
                                         backend=backend)
         s = time.time()
--- a/kallithea/tests/scripts/manual_test_crawler.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/scripts/manual_test_crawler.py	Sat May 02 21:20:43 2020 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -30,15 +30,13 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-from __future__ import print_function
-
-import cookielib
+import http.cookiejar
 import os
 import sys
 import tempfile
 import time
-import urllib
-import urllib2
+import urllib.parse
+import urllib.request
 from os.path import dirname
 
 from kallithea.lib import vcs
@@ -72,18 +70,18 @@
 ]
 
 
-cj = cookielib.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
-o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
+cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
+o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
 o.addheaders = [
     ('User-agent', 'kallithea-crawler'),
     ('Accept-Language', 'en - us, en;q = 0.5')
 ]
 
-urllib2.install_opener(o)
+urllib.request.install_opener(o)
 
 
 def _get_repo(proj):
-    if isinstance(proj, basestring):
+    if isinstance(proj, str):
         repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj))
         proj = proj
     else:
@@ -101,7 +99,7 @@
 
         page = '/'.join((proj, 'changelog',))
 
-        full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i})
+        full_uri = (BASE_URI % page) + '?' + urllib.parse.urlencode({'page': i})
         s = time.time()
         f = o.open(full_uri)
 
@@ -130,13 +128,13 @@
             break
 
         full_uri = (BASE_URI % raw_cs)
-        print('%s visiting %s\%s' % (cnt, full_uri, i))
+        print('%s visiting %s/%s' % (cnt, full_uri, i))
         s = time.time()
         f = o.open(full_uri)
         size = len(f.read())
         e = time.time() - s
         total_time += e
-        print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
+        print('%s visited %s/%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
 
     print('total_time', total_time)
     print('average on req', total_time / float(cnt))
--- a/kallithea/tests/vcs/base.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/base.py	Sat May 02 21:20:43 2020 +0200
@@ -27,8 +27,8 @@
     def _get_commits(cls):
         commits = [
             {
-                'message': u'Initial commit',
-                'author': u'Joe Doe <joe.doe@example.com>',
+                'message': 'Initial commit',
+                'author': 'Joe Doe <joe.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 20),
                 'added': [
                     FileNode('foobar', content='Foobar'),
@@ -37,8 +37,8 @@
                 ],
             },
             {
-                'message': u'Changes...',
-                'author': u'Jane Doe <jane.doe@example.com>',
+                'message': 'Changes...',
+                'author': 'Jane Doe <jane.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 21),
                 'added': [
                     FileNode('some/new.txt', content='news...'),
@@ -79,8 +79,8 @@
             for node in commit.get('removed', []):
                 cls.imc.remove(FileNode(node.path))
 
-            cls.tip = cls.imc.commit(message=unicode(commit['message']),
-                                     author=unicode(commit['author']),
+            cls.tip = cls.imc.commit(message=commit['message'],
+                                     author=commit['author'],
                                      date=commit['date'])
 
     @pytest.fixture(autouse=True)
--- a/kallithea/tests/vcs/conf.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/conf.py	Sat May 02 21:20:43 2020 +0200
@@ -7,8 +7,8 @@
 # Retrieve the necessary configuration options from the test base
 # module. Some of these configuration options are subsequently
 # consumed by the VCS test module.
-from kallithea.tests.base import (
-    GIT_REMOTE_REPO, HG_REMOTE_REPO, TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH)
+from kallithea.tests.base import (GIT_REMOTE_REPO, HG_REMOTE_REPO, TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL,
+                                  TESTS_TMP_PATH)
 
 
 __all__ = (
--- a/kallithea/tests/vcs/test_archives.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_archives.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,6 @@
 import datetime
+import io
 import os
-import StringIO
 import tarfile
 import tempfile
 import zipfile
@@ -18,7 +18,7 @@
     @classmethod
     def _get_commits(cls):
         start_date = datetime.datetime(2010, 1, 1, 20)
-        for x in xrange(5):
+        for x in range(5):
             yield {
                 'message': 'Commit %d' % x,
                 'author': 'Joe Doe <joe.doe@example.com>',
@@ -35,11 +35,10 @@
             self.tip.fill_archive(stream=f, kind='zip', prefix='repo')
         out = zipfile.ZipFile(path)
 
-        for x in xrange(5):
+        for x in range(5):
             node_path = '%d/file_%d.txt' % (x, x)
-            decompressed = StringIO.StringIO()
-            decompressed.write(out.read('repo/' + node_path))
-            assert decompressed.getvalue() == self.tip.get_node(node_path).content
+            decompressed = out.read('repo/' + node_path)
+            assert decompressed == self.tip.get_node(node_path).content
 
     def test_archive_tgz(self):
         path = tempfile.mkstemp(dir=TESTS_TMP_PATH, prefix='test_archive_tgz-')[1]
@@ -50,9 +49,9 @@
         outfile = tarfile.open(path, 'r|gz')
         outfile.extractall(outdir)
 
-        for x in xrange(5):
+        for x in range(5):
             node_path = '%d/file_%d.txt' % (x, x)
-            assert open(os.path.join(outdir, 'repo/' + node_path)).read() == self.tip.get_node(node_path).content
+            assert open(os.path.join(outdir, 'repo/' + node_path), 'rb').read() == self.tip.get_node(node_path).content
 
     def test_archive_tbz2(self):
         path = tempfile.mkstemp(dir=TESTS_TMP_PATH, prefix='test_archive_tbz2-')[1]
@@ -63,15 +62,15 @@
         outfile = tarfile.open(path, 'r|bz2')
         outfile.extractall(outdir)
 
-        for x in xrange(5):
+        for x in range(5):
             node_path = '%d/file_%d.txt' % (x, x)
-            assert open(os.path.join(outdir, 'repo/' + node_path)).read() == self.tip.get_node(node_path).content
+            assert open(os.path.join(outdir, 'repo/' + node_path), 'rb').read() == self.tip.get_node(node_path).content
 
     def test_archive_default_stream(self):
         tmppath = tempfile.mkstemp(dir=TESTS_TMP_PATH, prefix='test_archive_default_stream-')[1]
         with open(tmppath, 'wb') as stream:
             self.tip.fill_archive(stream=stream)
-        mystream = StringIO.StringIO()
+        mystream = io.BytesIO()
         self.tip.fill_archive(stream=mystream)
         mystream.seek(0)
         with open(tmppath, 'rb') as f:
--- a/kallithea/tests/vcs/test_branches.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_branches.py	Sat May 02 21:20:43 2020 +0200
@@ -46,8 +46,8 @@
         self.imc.add(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\n'))
         foobar_tip = self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
         )
         assert 'foobar' in self.repo.branches
@@ -59,23 +59,23 @@
         self.imc.add(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\n'))
         foobar_tip = self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
             parents=[tip],
         )
         self.imc.change(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\nand more...\n'))
         newtip = self.imc.commit(
-            message=u'At default branch',
-            author=u'joe',
+            message='At default branch',
+            author='joe',
             branch=foobar_tip.branch,
             parents=[foobar_tip],
         )
 
         newest_tip = self.imc.commit(
-            message=u'Merged with %s' % foobar_tip.raw_id,
-            author=u'joe',
+            message='Merged with %s' % foobar_tip.raw_id,
+            author='joe',
             branch=self.backend_class.DEFAULT_BRANCH_NAME,
             parents=[newtip, foobar_tip],
         )
@@ -85,16 +85,16 @@
 
     def test_branch_with_slash_in_name(self):
         self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
-        self.imc.commit(u'Branch with a slash!', author=u'joe',
+        self.imc.commit('Branch with a slash!', author='joe',
             branch='issue/123')
         assert 'issue/123' in self.repo.branches
 
     def test_branch_with_slash_in_name_and_similar_without(self):
         self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
-        self.imc.commit(u'Branch with a slash!', author=u'joe',
+        self.imc.commit('Branch with a slash!', author='joe',
             branch='issue/123')
         self.imc.add(vcs.nodes.FileNode('extrafile II', content='Some data\n'))
-        self.imc.commit(u'Branch without a slash...', author=u'joe',
+        self.imc.commit('Branch without a slash...', author='joe',
             branch='123')
         assert 'issue/123' in self.repo.branches
         assert '123' in self.repo.branches
--- a/kallithea/tests/vcs/test_changesets.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_changesets.py	Sat May 02 21:20:43 2020 +0200
@@ -15,18 +15,16 @@
 
     def test_as_dict(self):
         changeset = BaseChangeset()
-        changeset.id = 'ID'
         changeset.raw_id = 'RAW_ID'
         changeset.short_id = 'SHORT_ID'
         changeset.revision = 1009
         changeset.date = datetime.datetime(2011, 1, 30, 1, 45)
         changeset.message = 'Message of a commit'
         changeset.author = 'Joe Doe <joe.doe@example.com>'
-        changeset.added = [FileNode('foo/bar/baz'), FileNode(u'foobar'), FileNode(u'blåbærgrød')]
+        changeset.added = [FileNode('foo/bar/baz'), FileNode('foobar'), FileNode('blåbærgrød')]
         changeset.changed = []
         changeset.removed = []
         assert changeset.as_dict() == {
-            'id': 'ID',
             'raw_id': 'RAW_ID',
             'short_id': 'SHORT_ID',
             'revision': 1009,
@@ -36,7 +34,7 @@
                 'name': 'Joe Doe',
                 'email': 'joe.doe@example.com',
             },
-            'added': ['foo/bar/baz', 'foobar', u'bl\xe5b\xe6rgr\xf8d'],
+            'added': ['foo/bar/baz', 'foobar', 'bl\xe5b\xe6rgr\xf8d'],
             'changed': [],
             'removed': [],
         }
@@ -47,7 +45,7 @@
     @classmethod
     def _get_commits(cls):
         start_date = datetime.datetime(2010, 1, 1, 20)
-        for x in xrange(5):
+        for x in range(5):
             yield {
                 'message': 'Commit %d' % x,
                 'author': 'Joe Doe <joe.doe@example.com>',
@@ -61,15 +59,15 @@
         self.imc.add(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\n'))
         foobar_tip = self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
         )
         assert 'foobar' in self.repo.branches
         assert foobar_tip.branch == 'foobar'
         assert foobar_tip.branches == ['foobar']
         # 'foobar' should be the only branch that contains the new commit
-        branch_tips = self.repo.branches.values()
+        branch_tips = list(self.repo.branches.values())
         assert branch_tips.count(str(foobar_tip.raw_id)) == 1
 
     def test_new_head_in_default_branch(self):
@@ -77,23 +75,23 @@
         self.imc.add(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\n'))
         foobar_tip = self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
             parents=[tip],
         )
         self.imc.change(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\nand more...\n'))
         newtip = self.imc.commit(
-            message=u'At default branch',
-            author=u'joe',
+            message='At default branch',
+            author='joe',
             branch=foobar_tip.branch,
             parents=[foobar_tip],
         )
 
         newest_tip = self.imc.commit(
-            message=u'Merged with %s' % foobar_tip.raw_id,
-            author=u'joe',
+            message='Merged with %s' % foobar_tip.raw_id,
+            author='joe',
             branch=self.backend_class.DEFAULT_BRANCH_NAME,
             parents=[newtip, foobar_tip],
         )
@@ -106,14 +104,14 @@
         self.imc.add(vcs.nodes.FileNode('docs/index.txt',
             content='Documentation\n'))
         doc_changeset = self.imc.commit(
-            message=u'New branch: docs',
-            author=u'joe',
+            message='New branch: docs',
+            author='joe',
             branch='docs',
         )
         self.imc.add(vcs.nodes.FileNode('newfile', content=''))
         self.imc.commit(
-            message=u'Back in default branch',
-            author=u'joe',
+            message='Back in default branch',
+            author='joe',
             parents=[tip],
         )
         default_branch_changesets = self.repo.get_changesets(
@@ -121,11 +119,11 @@
         assert doc_changeset not in default_branch_changesets
 
     def test_get_changeset_by_branch(self):
-        for branch, sha in self.repo.branches.iteritems():
+        for branch, sha in self.repo.branches.items():
             assert sha == self.repo.get_changeset(branch).raw_id
 
     def test_get_changeset_by_tag(self):
-        for tag, sha in self.repo.tags.iteritems():
+        for tag, sha in self.repo.tags.items():
             assert sha == self.repo.get_changeset(tag).raw_id
 
     def test_get_changeset_parents(self):
@@ -145,10 +143,10 @@
     @classmethod
     def _get_commits(cls):
         start_date = datetime.datetime(2010, 1, 1, 20)
-        for x in xrange(5):
+        for x in range(5):
             yield {
-                'message': u'Commit %d' % x,
-                'author': u'Joe Doe <joe.doe@example.com>',
+                'message': 'Commit %d' % x,
+                'author': 'Joe Doe <joe.doe@example.com>',
                 'date': start_date + datetime.timedelta(hours=12 * x),
                 'added': [
                     FileNode('file_%d.txt' % x, content='Foobar %d' % x),
@@ -240,7 +238,7 @@
     def test_get_filenodes_generator(self):
         tip = self.repo.get_changeset()
         filepaths = [node.path for node in tip.get_filenodes_generator()]
-        assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
+        assert filepaths == ['file_%d.txt' % x for x in range(5)]
 
     def test_size(self):
         tip = self.repo.get_changeset()
@@ -249,15 +247,15 @@
 
     def test_author(self):
         tip = self.repo.get_changeset()
-        assert tip.author == u'Joe Doe <joe.doe@example.com>'
+        assert tip.author == 'Joe Doe <joe.doe@example.com>'
 
     def test_author_name(self):
         tip = self.repo.get_changeset()
-        assert tip.author_name == u'Joe Doe'
+        assert tip.author_name == 'Joe Doe'
 
     def test_author_email(self):
         tip = self.repo.get_changeset()
-        assert tip.author_email == u'joe.doe@example.com'
+        assert tip.author_email == 'joe.doe@example.com'
 
     def test_get_changesets_raise_changesetdoesnotexist_for_wrong_start(self):
         with pytest.raises(ChangesetDoesNotExistError):
@@ -299,8 +297,8 @@
     def _get_commits(cls):
         return [
             {
-                'message': u'Initial',
-                'author': u'Joe Doe <joe.doe@example.com>',
+                'message': 'Initial',
+                'author': 'Joe Doe <joe.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 20),
                 'added': [
                     FileNode('foo/bar', content='foo'),
@@ -310,8 +308,8 @@
                 ],
             },
             {
-                'message': u'Massive changes',
-                'author': u'Joe Doe <joe.doe@example.com>',
+                'message': 'Massive changes',
+                'author': 'Joe Doe <joe.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 22),
                 'added': [FileNode('fallout', content='War never changes')],
                 'changed': [
@@ -332,8 +330,8 @@
         ])
         assert list(changeset.changed) == []
         assert list(changeset.removed) == []
-        assert u'foo/ba\u0142' in changeset.as_dict()['added']
-        assert u'foo/ba\u0142' in changeset.__json__(with_file_list=True)['added']
+        assert 'foo/ba\u0142' in changeset.as_dict()['added']
+        assert 'foo/ba\u0142' in changeset.__json__(with_file_list=True)['added']
 
     def test_head_added(self):
         changeset = self.repo.get_changeset()
@@ -357,7 +355,7 @@
     def test_get_filemode_non_ascii(self):
         changeset = self.repo.get_changeset()
         assert 33188 == changeset.get_file_mode('foo/bał')
-        assert 33188 == changeset.get_file_mode(u'foo/bał')
+        assert 33188 == changeset.get_file_mode('foo/bał')
 
 
 class TestGitChangesetsWithCommits(_ChangesetsWithCommitsTestCaseixin):
--- a/kallithea/tests/vcs/test_filenodes_unicode_path.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_filenodes_unicode_path.py	Sat May 02 21:20:43 2020 +0200
@@ -9,28 +9,21 @@
 class FileNodeUnicodePathTestsMixin(_BackendTestMixin):
 
     fname = 'ąśðąęłąć.txt'
-    ufname = (fname).decode('utf-8')
 
     @classmethod
     def _get_commits(cls):
-        cls.nodes = [
-            FileNode(cls.fname, content='Foobar'),
-        ]
-
-        commits = [
+        return [
             {
                 'message': 'Initial commit',
                 'author': 'Joe Doe <joe.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 20),
-                'added': cls.nodes,
+                'added': [FileNode(cls.fname, content='Foobar')],
             },
         ]
-        return commits
 
     def test_filenode_path(self):
         node = self.tip.get_node(self.fname)
-        unode = self.tip.get_node(self.ufname)
-        assert node == unode
+        assert node.path == self.fname
 
 
 class TestGitFileNodeUnicodePath(FileNodeUnicodePathTestsMixin):
--- a/kallithea/tests/vcs/test_getitem.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_getitem.py	Sat May 02 21:20:43 2020 +0200
@@ -9,7 +9,7 @@
     @classmethod
     def _get_commits(cls):
         start_date = datetime.datetime(2010, 1, 1, 20)
-        for x in xrange(5):
+        for x in range(5):
             yield {
                 'message': 'Commit %d' % x,
                 'author': 'Joe Doe <joe.doe@example.com>',
@@ -23,7 +23,7 @@
         assert self.repo[-1] == self.repo.get_changeset()
 
     def test__getitem__returns_correct_items(self):
-        changesets = [self.repo[x] for x in xrange(len(self.repo.revisions))]
+        changesets = [self.repo[x] for x in range(len(self.repo.revisions))]
         assert changesets == list(self.repo.get_changesets())
 
 
--- a/kallithea/tests/vcs/test_getslice.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_getslice.py	Sat May 02 21:20:43 2020 +0200
@@ -9,7 +9,7 @@
     @classmethod
     def _get_commits(cls):
         start_date = datetime.datetime(2010, 1, 1, 20)
-        for x in xrange(5):
+        for x in range(5):
             yield {
                 'message': 'Commit %d' % x,
                 'author': 'Joe Doe <joe.doe@example.com>',
--- a/kallithea/tests/vcs/test_git.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_git.py	Sat May 02 21:20:43 2020 +0200
@@ -1,7 +1,6 @@
 import datetime
 import os
 import sys
-import urllib2
 
 import mock
 import pytest
@@ -31,8 +30,8 @@
             GitRepository(wrong_repo_path)
 
     def test_git_cmd_injection(self):
-        repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
-        with pytest.raises(urllib2.URLError):
+        repo_inject_path = 'file:/%s; echo "Cake";' % TEST_GIT_REPO
+        with pytest.raises(RepositoryError):
             # Should fail because URL will contain the parts after ; too
             GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
 
@@ -229,7 +228,7 @@
     def test_changeset10(self):
 
         chset10 = self.repo.get_changeset(self.repo.revisions[9])
-        readme = """===
+        readme = b"""===
 VCS
 ===
 
@@ -343,7 +342,7 @@
         start = offset
         end = limit and offset + limit or None
         sliced = list(self.repo[start:end])
-        pytest.failUnlessEqual(result, sliced,
+        pytest.assertEqual(result, sliced,
             msg="Comparison failed for limit=%s, offset=%s"
             "(get_changeset returned: %s and sliced: %s"
             % (limit, offset, result, sliced))
@@ -588,19 +587,19 @@
             'vcs/nodes.py']
         assert set(changed) == set([f.path for f in chset.changed])
 
-    def test_commit_message_is_unicode(self):
+    def test_commit_message_is_str(self):
         for cs in self.repo:
-            assert type(cs.message) == unicode
+            assert isinstance(cs.message, str)
 
-    def test_changeset_author_is_unicode(self):
+    def test_changeset_author_is_str(self):
         for cs in self.repo:
-            assert type(cs.author) == unicode
+            assert isinstance(cs.author, str)
 
-    def test_repo_files_content_is_unicode(self):
+    def test_repo_files_content_is_bytes(self):
         changeset = self.repo.get_changeset()
         for node in changeset.get_node('/'):
             if node.is_file():
-                assert type(node.content) == unicode
+                assert isinstance(node.content, bytes)
 
     def test_wrong_path(self):
         # There is 'setup.py' in the root dir but not there:
@@ -620,30 +619,6 @@
         assert 'marcink none@none' == self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992').author_name
 
 
-class TestGitSpecific():
-
-    def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self):
-        repo = mock.MagicMock()
-        changeset = GitChangeset(repo, 'foobar')
-        changeset._diff_name_status = 'foobar'
-        with pytest.raises(VCSError):
-            changeset.added
-
-    def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self):
-        repo = mock.MagicMock()
-        changeset = GitChangeset(repo, 'foobar')
-        changeset._diff_name_status = 'foobar'
-        with pytest.raises(VCSError):
-            changeset.added
-
-    def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self):
-        repo = mock.MagicMock()
-        changeset = GitChangeset(repo, 'foobar')
-        changeset._diff_name_status = 'foobar'
-        with pytest.raises(VCSError):
-            changeset.added
-
-
 class TestGitSpecificWithRepo(_BackendTestMixin):
     backend_alias = 'git'
 
@@ -657,7 +632,7 @@
                 'added': [
                     FileNode('foobar/static/js/admin/base.js', content='base'),
                     FileNode('foobar/static/admin', content='admin',
-                        mode=0120000), # this is a link
+                        mode=0o120000), # this is a link
                     FileNode('foo', content='foo'),
                 ],
             },
@@ -673,11 +648,11 @@
 
     def test_paths_slow_traversing(self):
         cs = self.repo.get_changeset()
-        assert cs.get_node('foobar').get_node('static').get_node('js').get_node('admin').get_node('base.js').content == 'base'
+        assert cs.get_node('foobar').get_node('static').get_node('js').get_node('admin').get_node('base.js').content == b'base'
 
     def test_paths_fast_traversing(self):
         cs = self.repo.get_changeset()
-        assert cs.get_node('foobar/static/js/admin/base.js').content == 'base'
+        assert cs.get_node('foobar/static/js/admin/base.js').content == b'base'
 
     def test_workdir_get_branch(self):
         self.repo.run_git_command(['checkout', '-b', 'production'])
@@ -689,65 +664,65 @@
         assert self.repo.workdir.get_branch() == 'master'
 
     def test_get_diff_runs_git_command_with_hashes(self):
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1)
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1)])
+             self.repo._get_revision(0), self.repo._get_revision(1)], cwd=self.repo.path)
 
     def test_get_diff_runs_git_command_with_str_hashes(self):
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['show', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(1)])
+             self.repo._get_revision(1)], cwd=self.repo.path)
 
     def test_get_diff_runs_git_command_with_path_if_its_given(self):
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1, 'foo')
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
+             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
 
     def test_get_diff_does_not_sanitize_valid_context(self):
         almost_overflowed_long_int = 2**31-1
 
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1, 'foo', context=almost_overflowed_long_int)
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U' + str(almost_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
+             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
 
     def test_get_diff_sanitizes_overflowing_context(self):
         overflowed_long_int = 2**31
         sanitized_overflowed_long_int = overflowed_long_int-1
 
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1, 'foo', context=overflowed_long_int)
 
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U' + str(sanitized_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
+             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
 
     def test_get_diff_does_not_sanitize_zero_context(self):
         zero_context = 0
 
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1, 'foo', context=zero_context)
 
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U' + str(zero_context), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
+             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
 
     def test_get_diff_sanitizes_negative_context(self):
         negative_context = -10
 
-        self.repo.run_git_command = mock.Mock(return_value=['', ''])
+        self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
         self.repo.get_diff(0, 1, 'foo', context=negative_context)
 
-        self.repo.run_git_command.assert_called_once_with(
+        self.repo._run_git_command.assert_called_once_with(
             ['diff', '-U0', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
-             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
+             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
 
 
 class TestGitRegression(_BackendTestMixin):
@@ -804,22 +779,24 @@
         self.repo = GitRepository(self.repo_directory, create=True)
 
         # Create a dictionary where keys are hook names, and values are paths to
-        # them. Deduplicates code in tests a bit.
-        self.hook_directory = self.repo.get_hook_location()
-        self.kallithea_hooks = dict((h, os.path.join(self.hook_directory, h)) for h in ("pre-receive", "post-receive"))
+        # them in the non-bare repo. Deduplicates code in tests a bit.
+        self.kallithea_hooks = {
+            "pre-receive": os.path.join(self.repo.path, '.git', 'hooks', "pre-receive"),
+            "post-receive": os.path.join(self.repo.path, '.git', 'hooks', "post-receive"),
+        }
 
     def test_hooks_created_if_missing(self):
         """
         Tests if hooks are installed in repository if they are missing.
         """
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             if os.path.exists(hook_path):
                 os.remove(hook_path)
 
         ScmModel().install_git_hooks(repo=self.repo)
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             assert os.path.exists(hook_path)
 
     def test_kallithea_hooks_updated(self):
@@ -827,13 +804,13 @@
         Tests if hooks are updated if they are Kallithea hooks already.
         """
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path, "w") as f:
                 f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")
 
         ScmModel().install_git_hooks(repo=self.repo)
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path) as f:
                 assert "JUST_BOGUS" not in f.read()
 
@@ -842,13 +819,13 @@
         Tests if hooks are left untouched if they are not Kallithea hooks.
         """
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path, "w") as f:
                 f.write("#!/bin/bash\n#CUSTOM_HOOK")
 
         ScmModel().install_git_hooks(repo=self.repo)
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path) as f:
                 assert "CUSTOM_HOOK" in f.read()
 
@@ -857,12 +834,12 @@
         Tests if hooks are forcefully updated even though they are custom hooks.
         """
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path, "w") as f:
                 f.write("#!/bin/bash\n#CUSTOM_HOOK")
 
         ScmModel().install_git_hooks(repo=self.repo, force_create=True)
 
-        for hook, hook_path in self.kallithea_hooks.iteritems():
+        for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path) as f:
                 assert "KALLITHEA_HOOK_VER" in f.read()
--- a/kallithea/tests/vcs/test_hg.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_hg.py	Sat May 02 21:20:43 2020 +0200
@@ -3,7 +3,6 @@
 import mock
 import pytest
 
-from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository
 from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
 from kallithea.lib.vcs.nodes import NodeKind, NodeState
@@ -19,7 +18,7 @@
                       % TEST_HG_REPO_CLONE)
 
     def setup_method(self):
-        self.repo = MercurialRepository(safe_str(TEST_HG_REPO))
+        self.repo = MercurialRepository(TEST_HG_REPO)
 
     def test_wrong_repo_path(self):
         wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
@@ -28,11 +27,11 @@
 
     def test_unicode_path_repo(self):
         with pytest.raises(VCSError):
-            MercurialRepository(u'iShouldFail')
+            MercurialRepository('iShouldFail')
 
     def test_repo_clone(self):
         self.__check_for_existing_repo()
-        repo = MercurialRepository(safe_str(TEST_HG_REPO))
+        repo = MercurialRepository(TEST_HG_REPO)
         repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
             src_url=TEST_HG_REPO, update_after_clone=True)
         assert len(repo.revisions) == len(repo_clone.revisions)
@@ -42,7 +41,7 @@
             assert raw_id == repo_clone.get_changeset(raw_id).raw_id
 
     def test_repo_clone_with_update(self):
-        repo = MercurialRepository(safe_str(TEST_HG_REPO))
+        repo = MercurialRepository(TEST_HG_REPO)
         repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
             src_url=TEST_HG_REPO, update_after_clone=True)
         assert len(repo.revisions) == len(repo_clone.revisions)
@@ -55,7 +54,7 @@
         )
 
     def test_repo_clone_without_update(self):
-        repo = MercurialRepository(safe_str(TEST_HG_REPO))
+        repo = MercurialRepository(TEST_HG_REPO)
         repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
             src_url=TEST_HG_REPO, update_after_clone=False)
         assert len(repo.revisions) == len(repo_clone.revisions)
@@ -219,7 +218,7 @@
     def test_changeset10(self):
 
         chset10 = self.repo.get_changeset(10)
-        readme = """===
+        readme = b"""===
 VCS
 ===
 
@@ -235,7 +234,7 @@
         assert node.kind == NodeKind.FILE
         assert node.content == readme
 
-    @mock.patch('kallithea.lib.vcs.backends.hg.repository.diffopts')
+    @mock.patch('mercurial.mdiff.diffopts')
     def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts):
         zero_context = 0
 
@@ -243,7 +242,7 @@
 
         mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
 
-    @mock.patch('kallithea.lib.vcs.backends.hg.repository.diffopts')
+    @mock.patch('mercurial.mdiff.diffopts')
     def test_get_diff_sanitizes_negative_context(self, mock_diffopts):
         negative_context = -10
         zero_context = 0
@@ -256,7 +255,7 @@
 class TestMercurialChangeset(object):
 
     def setup_method(self):
-        self.repo = MercurialRepository(safe_str(TEST_HG_REPO))
+        self.repo = MercurialRepository(TEST_HG_REPO)
 
     def _test_equality(self, changeset):
         revision = changeset.revision
@@ -444,20 +443,20 @@
         #    added:   20
         #    removed: 1
         changed = set(['.hgignore'
-            , 'README.rst' , 'docs/conf.py' , 'docs/index.rst' , 'setup.py'
-            , 'tests/test_hg.py' , 'tests/test_nodes.py' , 'vcs/__init__.py'
-            , 'vcs/backends/__init__.py' , 'vcs/backends/base.py'
-            , 'vcs/backends/hg.py' , 'vcs/nodes.py' , 'vcs/utils/__init__.py'])
+            , 'README.rst', 'docs/conf.py', 'docs/index.rst', 'setup.py'
+            , 'tests/test_hg.py', 'tests/test_nodes.py', 'vcs/__init__.py'
+            , 'vcs/backends/__init__.py', 'vcs/backends/base.py'
+            , 'vcs/backends/hg.py', 'vcs/nodes.py', 'vcs/utils/__init__.py'])
 
         added = set(['docs/api/backends/hg.rst'
-            , 'docs/api/backends/index.rst' , 'docs/api/index.rst'
-            , 'docs/api/nodes.rst' , 'docs/api/web/index.rst'
-            , 'docs/api/web/simplevcs.rst' , 'docs/installation.rst'
-            , 'docs/quickstart.rst' , 'setup.cfg' , 'vcs/utils/baseui_config.py'
-            , 'vcs/utils/web.py' , 'vcs/web/__init__.py' , 'vcs/web/exceptions.py'
-            , 'vcs/web/simplevcs/__init__.py' , 'vcs/web/simplevcs/exceptions.py'
-            , 'vcs/web/simplevcs/middleware.py' , 'vcs/web/simplevcs/models.py'
-            , 'vcs/web/simplevcs/settings.py' , 'vcs/web/simplevcs/utils.py'
+            , 'docs/api/backends/index.rst', 'docs/api/index.rst'
+            , 'docs/api/nodes.rst', 'docs/api/web/index.rst'
+            , 'docs/api/web/simplevcs.rst', 'docs/installation.rst'
+            , 'docs/quickstart.rst', 'setup.cfg', 'vcs/utils/baseui_config.py'
+            , 'vcs/utils/web.py', 'vcs/web/__init__.py', 'vcs/web/exceptions.py'
+            , 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py'
+            , 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py'
+            , 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py'
             , 'vcs/web/simplevcs/views.py'])
 
         removed = set(['docs/api.rst'])
@@ -536,19 +535,19 @@
         # but it would be one of ``removed`` (changeset's attribute)
         assert path in [rf.path for rf in chset.removed]
 
-    def test_commit_message_is_unicode(self):
+    def test_commit_message_is_str(self):
         for cm in self.repo:
-            assert type(cm.message) == unicode
+            assert isinstance(cm.message, str)
 
-    def test_changeset_author_is_unicode(self):
+    def test_changeset_author_is_str(self):
         for cm in self.repo:
-            assert type(cm.author) == unicode
+            assert isinstance(cm.author, str)
 
-    def test_repo_files_content_is_unicode(self):
+    def test_repo_files_content_is_bytes(self):
         test_changeset = self.repo.get_changeset(100)
         for node in test_changeset.get_node('/'):
             if node.is_file():
-                assert type(node.content) == unicode
+                assert isinstance(node.content, bytes)
 
     def test_wrong_path(self):
         # There is 'setup.py' in the root dir but not there:
--- a/kallithea/tests/vcs/test_inmemchangesets.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_inmemchangesets.py	Sat May 02 21:20:43 2020 +0200
@@ -7,11 +7,9 @@
 
 import pytest
 
-from kallithea.lib import vcs
-from kallithea.lib.vcs.exceptions import (
-    EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError)
+from kallithea.lib.vcs.exceptions import (EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
+                                          NodeDoesNotExistError, NodeNotChangedError)
 from kallithea.lib.vcs.nodes import DirNode, FileNode
-from kallithea.lib.vcs.utils import safe_unicode
 from kallithea.tests.vcs.base import _BackendTestMixin
 
 
@@ -38,8 +36,8 @@
             for node in self.nodes]
         for node in to_add:
             self.imc.add(node)
-        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
-        author = unicode(self.__class__)
+        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
+        author = str(self.__class__)
         changeset = self.imc.commit(message=message, author=author)
 
         newtip = self.repo.get_changeset()
@@ -60,8 +58,8 @@
         to_add = [FileNode(node.path, content=node.content)
             for node in self.nodes]
         self.imc.add(*to_add)
-        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
-        author = unicode(self.__class__)
+        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
+        author = str(self.__class__)
         changeset = self.imc.commit(message=message, author=author)
 
         newtip = self.repo.get_changeset()
@@ -80,11 +78,11 @@
     def test_add_actually_adds_all_nodes_at_second_commit_too(self):
         self.imc.add(FileNode('foo/bar/image.png', content='\0'))
         self.imc.add(FileNode('foo/README.txt', content='readme!'))
-        changeset = self.imc.commit(u'Initial', u'joe.doe@example.com')
+        changeset = self.imc.commit('Initial', 'joe.doe@example.com')
         assert isinstance(changeset.get_node('foo'), DirNode)
         assert isinstance(changeset.get_node('foo/bar'), DirNode)
-        assert changeset.get_node('foo/bar/image.png').content == '\0'
-        assert changeset.get_node('foo/README.txt').content == 'readme!'
+        assert changeset.get_node('foo/bar/image.png').content == b'\0'
+        assert changeset.get_node('foo/README.txt').content == b'readme!'
 
         # commit some more files again
         to_add = [
@@ -95,23 +93,23 @@
             FileNode('foobar/barbaz', content='foo'),
         ]
         self.imc.add(*to_add)
-        changeset = self.imc.commit(u'Another', u'joe.doe@example.com')
-        changeset.get_node('foo/bar/foobaz/bar').content == 'foo'
-        changeset.get_node('foo/bar/another/bar').content == 'foo'
-        changeset.get_node('foo/baz.txt').content == 'foo'
-        changeset.get_node('foobar/foobaz/file').content == 'foo'
-        changeset.get_node('foobar/barbaz').content == 'foo'
+        changeset = self.imc.commit('Another', 'joe.doe@example.com')
+        changeset.get_node('foo/bar/foobaz/bar').content == b'foo'
+        changeset.get_node('foo/bar/another/bar').content == b'foo'
+        changeset.get_node('foo/baz.txt').content == b'foo'
+        changeset.get_node('foobar/foobaz/file').content == b'foo'
+        changeset.get_node('foobar/barbaz').content == b'foo'
 
     def test_add_non_ascii_files(self):
         rev_count = len(self.repo.revisions)
         to_add = [
             FileNode('żółwik/zwierzątko', content='ćććć'),
-            FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
+            FileNode('żółwik/zwierzątko_uni', content='ćććć'),
         ]
         for node in to_add:
             self.imc.add(node)
-        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
-        author = unicode(self.__class__)
+        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
+        author = str(self.__class__)
         changeset = self.imc.commit(message=message, author=author)
 
         newtip = self.repo.get_changeset()
@@ -136,7 +134,7 @@
     def test_check_integrity_raise_already_exist(self):
         node = FileNode('foobar', content='baz')
         self.imc.add(node)
-        self.imc.commit(message=u'Added foobar', author=unicode(self))
+        self.imc.commit(message='Added foobar', author=str(self))
         self.imc.add(node)
         with pytest.raises(NodeAlreadyExistsError):
             self.imc.commit(message='new message',
@@ -145,45 +143,43 @@
     def test_change(self):
         self.imc.add(FileNode('foo/bar/baz', content='foo'))
         self.imc.add(FileNode('foo/fbar', content='foobar'))
-        tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
+        tip = self.imc.commit('Initial', 'joe.doe@example.com')
 
         # Change node's content
         node = FileNode('foo/bar/baz', content='My **changed** content')
         self.imc.change(node)
-        self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
+        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 
         newtip = self.repo.get_changeset()
         assert tip != newtip
-        assert tip.id != newtip.id
-        assert newtip.get_node('foo/bar/baz').content == 'My **changed** content'
+        assert tip.raw_id != newtip.raw_id
+        assert newtip.get_node('foo/bar/baz').content == b'My **changed** content'
 
     def test_change_non_ascii(self):
         to_add = [
             FileNode('żółwik/zwierzątko', content='ćććć'),
-            FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
+            FileNode('żółwik/zwierzątko_uni', content='ćććć'),
         ]
         for node in to_add:
             self.imc.add(node)
 
-        tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
+        tip = self.imc.commit('Initial', 'joe.doe@example.com')
 
         # Change node's content
         node = FileNode('żółwik/zwierzątko', content='My **changed** content')
         self.imc.change(node)
-        self.imc.commit(u'Changed %s' % safe_unicode(node.path),
-                        u'joe.doe@example.com')
+        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 
-        node = FileNode(u'żółwik/zwierzątko_uni', content=u'My **changed** content')
+        node = FileNode('żółwik/zwierzątko_uni', content='My **changed** content')
         self.imc.change(node)
-        self.imc.commit(u'Changed %s' % safe_unicode(node.path),
-                        u'joe.doe@example.com')
+        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 
         newtip = self.repo.get_changeset()
         assert tip != newtip
-        assert tip.id != newtip.id
+        assert tip.raw_id != newtip.raw_id
 
-        assert newtip.get_node('żółwik/zwierzątko').content == 'My **changed** content'
-        assert newtip.get_node('żółwik/zwierzątko_uni').content == 'My **changed** content'
+        assert newtip.get_node('żółwik/zwierzątko').content == b'My **changed** content'
+        assert newtip.get_node('żółwik/zwierzątko_uni').content == b'My **changed** content'
 
     def test_change_raise_empty_repository(self):
         node = FileNode('foobar')
@@ -193,7 +189,7 @@
     def test_check_integrity_change_raise_node_does_not_exist(self):
         node = FileNode('foobar', content='baz')
         self.imc.add(node)
-        self.imc.commit(message=u'Added foobar', author=unicode(self))
+        self.imc.commit(message='Added foobar', author=str(self))
         node = FileNode('not-foobar', content='')
         self.imc.change(node)
         with pytest.raises(NodeDoesNotExistError):
@@ -202,7 +198,7 @@
     def test_change_raise_node_already_changed(self):
         node = FileNode('foobar', content='baz')
         self.imc.add(node)
-        self.imc.commit(message=u'Added foobar', author=unicode(self))
+        self.imc.commit(message='Added foobar', author=str(self))
         node = FileNode('foobar', content='more baz')
         self.imc.change(node)
         with pytest.raises(NodeAlreadyChangedError):
@@ -215,14 +211,14 @@
         self.imc.change(node)
         with pytest.raises(NodeNotChangedError):
             self.imc.commit(
-                message=u'Trying to mark node as changed without touching it',
-                author=unicode(self)
+                message='Trying to mark node as changed without touching it',
+                author=str(self),
             )
 
     def test_change_raise_node_already_removed(self):
         node = FileNode('foobar', content='baz')
         self.imc.add(node)
-        self.imc.commit(message=u'Added foobar', author=unicode(self))
+        self.imc.commit(message='Added foobar', author=str(self))
         self.imc.remove(FileNode('foobar'))
         with pytest.raises(NodeAlreadyRemovedError):
             self.imc.change(node)
@@ -234,21 +230,21 @@
         node = self.nodes[0]
         assert node.content == tip.get_node(node.path).content
         self.imc.remove(node)
-        self.imc.commit(message=u'Removed %s' % node.path, author=unicode(self))
+        self.imc.commit(message='Removed %s' % node.path, author=str(self))
 
         newtip = self.repo.get_changeset()
         assert tip != newtip
-        assert tip.id != newtip.id
+        assert tip.raw_id != newtip.raw_id
         with pytest.raises(NodeDoesNotExistError):
             newtip.get_node(node.path)
 
     def test_remove_last_file_from_directory(self):
         node = FileNode('omg/qwe/foo/bar', content='foobar')
         self.imc.add(node)
-        self.imc.commit(u'added', u'joe doe')
+        self.imc.commit('added', 'joe doe')
 
         self.imc.remove(node)
-        tip = self.imc.commit(u'removed', u'joe doe')
+        tip = self.imc.commit('removed', 'joe doe')
         with pytest.raises(NodeDoesNotExistError):
             tip.get_node('omg/qwe/foo/bar')
 
@@ -257,7 +253,7 @@
         with pytest.raises(NodeDoesNotExistError):
             self.imc.commit(
                 message='Trying to remove node at empty repository',
-                author=str(self)
+                author=str(self),
             )
 
     def test_check_integrity_remove_raise_node_does_not_exist(self):
@@ -267,8 +263,8 @@
         self.imc.remove(node)
         with pytest.raises(NodeDoesNotExistError):
             self.imc.commit(
-                message=u'Trying to remove not existing node',
-                author=unicode(self)
+                message='Trying to remove not existing node',
+                author=str(self),
             )
 
     def test_remove_raise_node_already_removed(self):
@@ -301,12 +297,12 @@
     def test_multiple_commits(self):
         N = 3  # number of commits to perform
         last = None
-        for x in xrange(N):
+        for x in range(N):
             fname = 'file%s' % str(x).rjust(5, '0')
             content = 'foobar\n' * x
             node = FileNode(fname, content=content)
             self.imc.add(node)
-            commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
+            commit = self.imc.commit("Commit no. %s" % (x + 1), author='vcs')
             assert last != commit
             last = commit
 
@@ -320,8 +316,8 @@
         node = FileNode('foobar.txt', content='Foobared!')
         self.imc.add(node)
         date = datetime.datetime(1985, 1, 30, 1, 45)
-        commit = self.imc.commit(u"Committed at time when I was born ;-)",
-            author=u'lb <lb@example.com>', date=date)
+        commit = self.imc.commit("Committed at time when I was born ;-)",
+            author='lb <lb@example.com>', date=date)
 
         assert commit.date == date
 
--- a/kallithea/tests/vcs/test_nodes.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_nodes.py	Sat May 02 21:20:43 2020 +0200
@@ -3,6 +3,7 @@
 
 import pytest
 
+from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.nodes import DirNode, FileNode, Node, NodeError, NodeKind
 
 
@@ -48,11 +49,6 @@
         with pytest.raises(NodeError):
             Node('', NodeKind.FILE)
 
-    def test_kind_setter(self):
-        node = Node('', NodeKind.DIR)
-        with pytest.raises(NodeError):
-            setattr(node, 'kind', NodeKind.FILE)
-
     def _test_parent_path(self, node_path, expected_parent_path):
         """
         Tests if node's parent path are properly computed.
@@ -103,7 +99,7 @@
         node = DirNode('any_dir')
 
         assert node.is_dir()
-        with pytest.raises(NodeError):
+        with pytest.raises(AttributeError):  # Note: this used to raise NodeError
             getattr(node, 'content')
 
     def test_dir_node_iter(self):
@@ -144,13 +140,13 @@
         assert not mode & stat.S_IXOTH
 
     def test_file_node_is_executable(self):
-        node = FileNode('foobar', 'empty... almost', mode=0100755)
+        node = FileNode('foobar', 'empty... almost', mode=0o100755)
         assert node.is_executable
 
-        node = FileNode('foobar', 'empty... almost', mode=0100500)
+        node = FileNode('foobar', 'empty... almost', mode=0o100500)
         assert node.is_executable
 
-        node = FileNode('foobar', 'empty... almost', mode=0100644)
+        node = FileNode('foobar', 'empty... almost', mode=0o100644)
         assert not node.is_executable
 
     def test_mimetype(self):
@@ -158,10 +154,10 @@
         tar_node = FileNode('test.tar.gz')
 
         my_node2 = FileNode('myfile2')
-        my_node2._content = 'foobar'
+        my_node2._content = b'foobar'
 
         my_node3 = FileNode('myfile3')
-        my_node3._content = '\0foobar'
+        my_node3._content = b'\0foobar'
 
         assert py_node.mimetype == mimetypes.guess_type(py_node.name)[0]
         assert py_node.get_mimetype() == mimetypes.guess_type(py_node.name)
@@ -182,3 +178,8 @@
         data = """\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?"\x14j?\xa2M\x7fB\x14F\x9aQ?&\x842?\x0b\x89"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq"Sw.\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$"q[\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?\x9f\x8cE??x\x94??\r\xbdtoJU5"0N\x10U?\x00??V\t\x02\x9f\x81?U?\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&"?\xb7ZP \x0c<?O\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J\x0bV"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u\xb2?1\xbe|/\x92M@\xa2!F?\xa9>"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00IEND\xaeB`\x82"""
         filenode = FileNode('calendar.png', content=data)
         assert filenode.is_binary
+
+    def test_if_binary_empty(self):
+        empty_cs = EmptyChangeset()
+        filenode = FileNode('foo', changeset=empty_cs)
+        assert not filenode.is_binary
--- a/kallithea/tests/vcs/test_repository.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_repository.py	Sat May 02 21:20:43 2020 +0200
@@ -85,7 +85,7 @@
                 'removed': [FileNode('foobar')],
             },
             {
-                'message': u'Commit that contains glob pattern in filename',
+                'message': 'Commit that contains glob pattern in filename',
                 'author': 'Jane Doe <jane.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 22),
                 'added': [
@@ -110,7 +110,7 @@
 
     def test_initial_commit_diff(self):
         initial_rev = self.repo.revisions[0]
-        assert self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev) == br'''diff --git a/foobar b/foobar
 new file mode 100644
 index 0000000000000000000000000000000000000000..f6ea0495187600e7b2288c8ac19c5886383a4632
 --- /dev/null
@@ -130,7 +130,7 @@
 
     def test_second_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[0], revs[1]) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(revs[0], revs[1]) == br'''diff --git a/foobar b/foobar
 index f6ea0495187600e7b2288c8ac19c5886383a4632..389865bb681b358c9b102d79abd8d5f941e96551 100644
 --- a/foobar
 +++ b/foobar
@@ -151,7 +151,7 @@
 
     def test_third_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[1], revs[2]) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(revs[1], revs[2]) == br'''diff --git a/foobar b/foobar
 deleted file mode 100644
 index 389865bb681b358c9b102d79abd8d5f941e96551..0000000000000000000000000000000000000000
 --- a/foobar
@@ -173,7 +173,7 @@
 
     def test_fourth_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[2], revs[3]) == '''diff --git a/README{ b/README{
+        assert self.repo.get_diff(revs[2], revs[3]) == br'''diff --git a/README{ b/README{
 new file mode 100644
 index 0000000000000000000000000000000000000000..cdc0c1b5d234feedb37bbac19cd1b6442061102d
 --- /dev/null
@@ -189,7 +189,7 @@
 
     def test_initial_commit_diff(self):
         initial_rev = self.repo.revisions[0]
-        assert self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev) == br'''diff --git a/foobar b/foobar
 new file mode 100644
 --- /dev/null
 +++ b/foobar
@@ -207,7 +207,7 @@
 
     def test_second_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[0], revs[1]) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(revs[0], revs[1]) == br'''diff --git a/foobar b/foobar
 --- a/foobar
 +++ b/foobar
 @@ -1,1 +1,1 @@
@@ -226,7 +226,7 @@
 
     def test_third_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[1], revs[2]) == '''diff --git a/foobar b/foobar
+        assert self.repo.get_diff(revs[1], revs[2]) == br'''diff --git a/foobar b/foobar
 deleted file mode 100644
 --- a/foobar
 +++ /dev/null
@@ -246,7 +246,7 @@
 
     def test_fourth_changeset_diff(self):
         revs = self.repo.revisions
-        assert self.repo.get_diff(revs[2], revs[3]) == '''diff --git a/README{ b/README{
+        assert self.repo.get_diff(revs[2], revs[3]) == br'''diff --git a/README{ b/README{
 new file mode 100644
 --- /dev/null
 +++ b/README{
--- a/kallithea/tests/vcs/test_utils.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_utils.py	Sat May 02 21:20:43 2020 +0200
@@ -191,8 +191,8 @@
                    ('justname', '')),
                   ('Mr Double Name withemail@example.com ',
                    ('Mr Double Name', 'withemail@example.com')),
-                  (u'John Doe <джондо à éẋàṁṗłê.ç°ḿ>',
-                   (u'John Doe <\u0434\u0436\u043e\u043d\u0434\u043e \xe0 \xe9\u1e8b\xe0\u1e41\u1e57\u0142\xea.\xe7\xb0\u1e3f>', '')),
+                  ('John Doe <джондо à éẋàṁṗłê.ç°ḿ>',
+                   ('John Doe <\u0434\u0436\u043e\u043d\u0434\u043e \xe0 \xe9\u1e8b\xe0\u1e41\u1e57\u0142\xea.\xe7\xb0\u1e3f>', '')),
                   ]
 
     def test_author_email(self):
--- a/kallithea/tests/vcs/test_vcs.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_vcs.py	Sat May 02 21:20:43 2020 +0200
@@ -3,7 +3,6 @@
 
 import pytest
 
-from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs import VCSError, get_backend, get_repo
 from kallithea.lib.vcs.backends.hg import MercurialRepository
 from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_HG_REPO, TESTS_TMP_PATH
@@ -22,14 +21,14 @@
         alias = 'hg'
         path = TEST_HG_REPO
         backend = get_backend(alias)
-        repo = backend(safe_str(path))
+        repo = backend(path)
         assert 'hg' == repo.alias
 
     def test_alias_detect_git(self):
         alias = 'git'
         path = TEST_GIT_REPO
         backend = get_backend(alias)
-        repo = backend(safe_str(path))
+        repo = backend(path)
         assert 'git' == repo.alias
 
     def test_wrong_alias(self):
@@ -41,28 +40,28 @@
         alias = 'hg'
         path = TEST_HG_REPO
         backend = get_backend(alias)
-        repo = backend(safe_str(path))
+        repo = backend(path)
 
-        assert repo.__class__ == get_repo(safe_str(path), alias).__class__
-        assert repo.path == get_repo(safe_str(path), alias).path
+        assert repo.__class__ == get_repo(path, alias).__class__
+        assert repo.path == get_repo(path, alias).path
 
     def test_get_repo_autoalias_hg(self):
         alias = 'hg'
         path = TEST_HG_REPO
         backend = get_backend(alias)
-        repo = backend(safe_str(path))
+        repo = backend(path)
 
-        assert repo.__class__ == get_repo(safe_str(path)).__class__
-        assert repo.path == get_repo(safe_str(path)).path
+        assert repo.__class__ == get_repo(path).__class__
+        assert repo.path == get_repo(path).path
 
     def test_get_repo_autoalias_git(self):
         alias = 'git'
         path = TEST_GIT_REPO
         backend = get_backend(alias)
-        repo = backend(safe_str(path))
+        repo = backend(path)
 
-        assert repo.__class__ == get_repo(safe_str(path)).__class__
-        assert repo.path == get_repo(safe_str(path)).path
+        assert repo.__class__ == get_repo(path).__class__
+        assert repo.path == get_repo(path).path
 
     def test_get_repo_err(self):
         blank_repo_path = os.path.join(TESTS_TMP_PATH, 'blank-error-repo')
--- a/kallithea/tests/vcs/test_workdirs.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/kallithea/tests/vcs/test_workdirs.py	Sat May 02 21:20:43 2020 +0200
@@ -12,8 +12,8 @@
     def _get_commits(cls):
         commits = [
             {
-                'message': u'Initial commit',
-                'author': u'Joe Doe <joe.doe@example.com>',
+                'message': 'Initial commit',
+                'author': 'Joe Doe <joe.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 20),
                 'added': [
                     FileNode('foobar', content='Foobar'),
@@ -22,8 +22,8 @@
                 ],
             },
             {
-                'message': u'Changes...',
-                'author': u'Jane Doe <jane.doe@example.com>',
+                'message': 'Changes...',
+                'author': 'Jane Doe <jane.doe@example.com>',
                 'date': datetime.datetime(2010, 1, 1, 21),
                 'added': [
                     FileNode('some/new.txt', content='news...'),
@@ -43,8 +43,8 @@
         self.imc.add(FileNode('docs/index.txt',
             content='Documentation\n'))
         self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
         )
         assert self.repo.workdir.get_branch() == self.default_branch
@@ -54,8 +54,8 @@
         self.imc.add(FileNode('docs/index.txt',
             content='Documentation\n'))
         head = self.imc.commit(
-            message=u'New branch: foobar',
-            author=u'joe',
+            message='New branch: foobar',
+            author='joe',
             branch='foobar',
         )
         assert self.repo.workdir.get_branch() == self.default_branch
@@ -73,7 +73,7 @@
             self.repo.workdir.checkout_branch(branch='foobranch')
         # create new branch 'foobranch'.
         self.imc.add(FileNode('file1', content='blah'))
-        self.imc.commit(message=u'asd', author=u'john', branch='foobranch')
+        self.imc.commit(message='asd', author='john', branch='foobranch')
         # go back to the default branch
         self.repo.workdir.checkout_branch()
         assert self.repo.workdir.get_branch() == self.backend_class.DEFAULT_BRANCH_NAME
--- a/scripts/docs-headings.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/docs-headings.py	Sat May 02 21:20:43 2020 +0200
@@ -1,11 +1,9 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 
 """
 Consistent formatting of rst section titles
 """
 
-from __future__ import print_function
-
 import re
 import subprocess
 
@@ -35,6 +33,7 @@
 def main():
     filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines()
     for fn in filenames:
+        fn = fn.decode()
         print('processing %s' % fn)
         s = open(fn).read()
 
--- a/scripts/generate-ini.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/generate-ini.py	Sat May 02 21:20:43 2020 +0200
@@ -1,10 +1,8 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """
 Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini
 """
 
-from __future__ import print_function
-
 import re
 
 from kallithea.lib import inifile
@@ -62,6 +60,13 @@
         print('writing:', makofile)
         open(makofile, 'w').write(mako_marked_up)
 
+    lines = re.findall(r'\n(# [^ ].*)', mako_marked_up)
+    if lines:
+        print('ERROR: the template .ini file convention is to use "## Foo Bar" for text comments and "#foo = bar" for disabled settings')
+        for line in lines:
+            print(line)
+        raise SystemExit(1)
+
     # create ini files
     for fn, settings in ini_files:
         print('updating:', fn)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/i18n	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import shutil
+import sys
+
+import click
+
+import i18n_utils
+
+
+"""
+Tool for maintenance of .po and .pot files
+
+Normally, the i18n-related files contain for each translatable string a
+reference to all the source code locations where this string is found. This
+meta data is useful for translators to assess how strings are used, but is not
+relevant for normal development nor for running Kallithea. Such meta data, or
+derived data like kallithea.pot, will inherently be outdated, and create
+unnecessary churn and repository growth, making it harder to spot actual and
+important changes.
+"""
+
+@click.group()
+@click.option('--debug/--no-debug', default=False)
+def cli(debug):
+    if (debug):
+        i18n_utils.do_debug = True
+    pass
+
+@cli.command()
+@click.argument('po_files', nargs=-1)
+@click.option('--merge-pot-file', default=None)
+@click.option('--strip/--no-strip', default=False)
+def normalize_po_files(po_files, merge_pot_file, strip):
+    """Normalize the specified .po and .pot files.
+
+    By default, only actual translations and essential headers will be
+    preserved, just as we want it in the main branches with minimal noise.
+
+    If a .pot file is specified, the po files will instead be updated by
+    running GNU msgmerge with this .pot file, thus updating source code
+    references and preserving comments and outdated translations.
+    """
+    for po_file in po_files:
+        i18n_utils._normalize_po_file(po_file, merge_pot_file=merge_pot_file, strip=strip)
+
+@cli.command()
+@click.argument('local')
+@click.argument('base')
+@click.argument('other')
+@click.argument('output')
+@click.option('--merge-pot-file', default=None)
+@click.option('--strip/--no-strip', default=False)
+def normalized_merge(local, base, other, output, merge_pot_file, strip):
+    """Merge tool for use with 'hg merge/rebase/graft --tool'
+
+    i18n files are partially manually edited original source of content, and
+    partially automatically generated and updated. That creates a lot of churn
+    and often causes a lot of merge conflicts.
+
+    To avoid that, this merge tool wrapper will normalize .po content before
+    running the merge tool.
+
+    By default, only actual translations and essential headers will be
+    preserved, just as we want it in the main branches with minimal noise.
+
+    If a .pot file is specified, the po files will instead be updated by
+    running GNU msgmerge with this .pot file, thus updating source code
+    references and preserving comments and outdated translations.
+
+    Add the following to your user or repository-specific .hgrc file to use it:
+        [merge-tools]
+        i18n.executable = /path/to/scripts/i18n
+        i18n.args = normalized-merge $local $base $other $output
+
+    and then invoke merge/rebase/graft with the additional argument '--tool i18n'.
+    """
+    from mercurial import (
+        context,
+        simplemerge,
+        ui as uimod,
+    )
+
+    print('i18n normalized-merge: normalizing and merging %s' % output)
+
+    i18n_utils._normalize_po_file(local, merge_pot_file=merge_pot_file, strip=strip)
+    i18n_utils._normalize_po_file(base, merge_pot_file=merge_pot_file, strip=strip)
+    i18n_utils._normalize_po_file(other, merge_pot_file=merge_pot_file, strip=strip)
+    i18n_utils._normalize_po_file(output, merge_pot_file=merge_pot_file, strip=strip)
+
+    # simplemerge will write markers to 'local' if it fails, keep a copy without markers
+    localkeep = local + '.keep'
+    shutil.copyfile(local, localkeep)
+
+    ret = simplemerge.simplemerge(uimod.ui.load(),
+         context.arbitraryfilectx(local.encode('utf-8')),
+         context.arbitraryfilectx(base.encode('utf-8')),
+         context.arbitraryfilectx(other.encode('utf-8')),
+         label=[b'local', b'other', b'base'],
+         mode='merge',
+    )
+    shutil.copyfile(local, output)  # simplemerge wrote to local - either resolved or with conflict markers
+    if ret:
+        shutil.copyfile(localkeep, local)
+        basekeep = base + '.keep'
+        otherkeep = other + '.keep'
+        shutil.copyfile(base, basekeep)
+        shutil.copyfile(other, otherkeep)
+        sys.stderr.write("Error: simple merge failed and %s is left with conflict markers. Resolve the conflicts, then use 'hg resolve -m'.\n" % output)
+        sys.stderr.write('Resolve with e.g.: kdiff3 %s %s %s -o %s\n' % (basekeep, localkeep, otherkeep, output))
+        sys.exit(ret)
+
+    os.remove(localkeep)
+
+@cli.command()
+@click.argument('file1')
+@click.argument('file2')
+@click.option('--merge-pot-file', default=None)
+@click.option('--strip/--no-strip', default=False)
+def normalized_diff(file1, file2, merge_pot_file, strip):
+    """Compare two files while transparently normalizing them."""
+    sys.exit(i18n_utils._normalized_diff(file1, file2, merge_pot_file=merge_pot_file, strip=strip))
+
+if __name__ == '__main__':
+    cli()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/i18n_utils.py	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,197 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+do_debug = False  # set from scripts/i18n --debug
+
+def debug(*args, **kwargs):
+    if do_debug:
+        print(*args, **kwargs)
+
+def runcmd(cmd, *args, **kwargs):
+    debug('... Executing command: %s' % ' '.join(cmd))
+    subprocess.check_call(cmd, *args, **kwargs)
+
+header_comment_strip_re = re.compile(r'''
+    ^
+    [#][ ]Translations[ ]template[ ]for[ ]Kallithea[.] \n
+    |
+    ^
+    [#][ ]FIRST[ ]AUTHOR[ ]<EMAIL@ADDRESS>,[ ]\d+[.] \n
+    (?:[#] \n)?
+    |
+    ^
+    (?:[#] \n)?
+    [#],[ ]fuzzy \n
+    |
+    ^
+    [#][ ][#],[ ]fuzzy \n
+    ''',
+    re.MULTILINE|re.VERBOSE)
+
+header_normalize_re = re.compile(r'''
+    ^ "
+    (POT-Creation-Date|PO-Revision-Date|Last-Translator|Language-Team|X-Generator|Generated-By|Project-Id-Version):
+    [ ][^\\]*\\n
+    " \n
+    ''',
+    re.MULTILINE|re.IGNORECASE|re.VERBOSE)
+
+def _normalize_po(raw_content):
+    r"""
+    >>> print(_normalize_po(r'''
+    ... # header comment
+    ...
+    ...
+    ... # comment before header
+    ... msgid ""
+    ... msgstr "yada"
+    ... "POT-Creation-Date: 2019-05-04 21:13+0200\n"
+    ... "MIME-Version: "
+    ... "1.0\n"
+    ... "Last-Translator: Jabba"
+    ... "the Hutt\n"
+    ... "X-Generator: Weblate 1.2.3\n"
+    ...
+    ... # comment, but not in header
+    ... msgid "None"
+    ... msgstr "Ingen"
+    ...
+    ...
+    ... line 2
+    ... # third comment
+    ...
+    ... msgid "Special"
+    ... msgstr ""
+    ...
+    ... msgid "Specialist"
+    ... # odd comment
+    ... msgstr ""
+    ... "Expert"
+    ...
+    ... # crazy fuzzy auto translation by msgmerge, using foo for bar
+    ... #, fuzzy
+    ... #| msgid "some foo string"
+    ... msgid "some bar string."
+    ... msgstr "translation of foo string"
+    ...
+    ... msgid "%d minute"
+    ... msgid_plural "%d minutes"
+    ... msgstr[0] "minut"
+    ... msgstr[1] "minutter"
+    ... msgstr[2] ""
+    ...
+    ... msgid "%d year"
+    ... msgid_plural "%d years"
+    ... msgstr[0] ""
+    ... msgstr[1] ""
+    ...
+    ... # last comment
+    ... ''') + '^^^')
+    # header comment
+    <BLANKLINE>
+    <BLANKLINE>
+    # comment before header
+    <BLANKLINE>
+    msgid ""
+    msgstr "yada"
+    "MIME-Version: "
+    "1.0\n"
+    <BLANKLINE>
+    msgid "None"
+    msgstr "Ingen"
+    <BLANKLINE>
+    line 2
+    <BLANKLINE>
+    msgid "Specialist"
+    msgstr ""
+    "Expert"
+    <BLANKLINE>
+    msgid "%d minute"
+    msgid_plural "%d minutes"
+    msgstr[0] "minut"
+    msgstr[1] "minutter"
+    msgstr[2] ""
+    ^^^
+    """
+    header_start = raw_content.find('\nmsgid ""\n') + 1
+    header_end = raw_content.find('\n\n', header_start) + 1 or len(raw_content)
+    chunks = [
+        header_comment_strip_re.sub('', raw_content[0:header_start])
+            .strip(),
+        '',
+        header_normalize_re.sub('', raw_content[header_start:header_end])
+            .replace(
+                r'"Content-Type: text/plain; charset=utf-8\n"',
+                r'"Content-Type: text/plain; charset=UTF-8\n"')  # maintain msgmerge casing
+            .strip(),
+        '']  # preserve normalized header
+    # all chunks are separated by empty line
+    for raw_chunk in raw_content[header_end:].split('\n\n'):
+        if '\n#, fuzzy' in raw_chunk:  # might be like "#, fuzzy, python-format"
+            continue  # drop crazy auto translation that is worse than useless
+        # strip all comment lines from chunk
+        chunk_lines = [
+            line
+            for line in raw_chunk.splitlines()
+            if line
+            and not line.startswith('#')
+        ]
+        if not chunk_lines:
+            continue
+        # check lines starting from first msgstr, skip chunk if no translation lines
+        msgstr_i = [i for i, line in enumerate(chunk_lines) if line.startswith('msgstr')]
+        if (
+            chunk_lines[0].startswith('msgid') and
+            msgstr_i and
+            all(line.endswith(' ""') for line in chunk_lines[msgstr_i[0]:])
+        ):  # skip translation chunks that don't have any actual translations
+            continue
+        chunks.append('\n'.join(chunk_lines) + '\n')
+    return '\n'.join(chunks)
+
+def _normalize_po_file(po_file, merge_pot_file=None, strip=False):
+    if merge_pot_file:
+        runcmd(['msgmerge', '--width=76', '--backup=none', '--previous',
+                '--update', po_file, '-q', merge_pot_file])
+    if strip:
+        po_tmp = po_file + '.tmp'
+        with open(po_file, 'r') as src, open(po_tmp, 'w') as dest:
+            raw_content = src.read()
+            normalized_content = _normalize_po(raw_content)
+            dest.write(normalized_content)
+        os.rename(po_tmp, po_file)
+
+def _normalized_diff(file1, file2, merge_pot_file=None, strip=False):
+    # Create temporary copies of both files
+    temp1 = tempfile.NamedTemporaryFile(prefix=os.path.basename(file1))
+    temp2 = tempfile.NamedTemporaryFile(prefix=os.path.basename(file2))
+    debug('normalized_diff: %s -> %s / %s -> %s' % (file1, temp1.name, file2, temp2.name))
+    shutil.copyfile(file1, temp1.name)
+    shutil.copyfile(file2, temp2.name)
+    # Normalize them in place
+    _normalize_po_file(temp1.name, merge_pot_file=merge_pot_file, strip=strip)
+    _normalize_po_file(temp2.name, merge_pot_file=merge_pot_file, strip=strip)
+    # Now compare
+    try:
+        runcmd(['diff', '-u', temp1.name, temp2.name])
+    except subprocess.CalledProcessError as e:
+        return e.returncode
--- a/scripts/logformat.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/logformat.py	Sat May 02 21:20:43 2020 +0200
@@ -1,6 +1,4 @@
-#!/usr/bin/env python2
-
-from __future__ import print_function
+#!/usr/bin/env python3
 
 import re
 import sys
--- a/scripts/make-release	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/make-release	Sat May 02 21:20:43 2020 +0200
@@ -15,7 +15,7 @@
 trap cleanup EXIT
 
 echo "Setting up a fresh virtualenv in $venv"
-virtualenv -p python2 "$venv"
+python3 -m venv "$venv"
 . "$venv/bin/activate"
 
 echo "Install/verify tools needed for building and uploading stuff"
@@ -35,8 +35,8 @@
 sed -e 's/[^ ]*[ ]*\([^ ]*\).*/\1/g' MANIFEST.in | xargs ls -lad
 
 echo "Build dist"
-python2 setup.py compile_catalog
-python2 setup.py sdist
+python3 setup.py compile_catalog
+python3 setup.py sdist
 
 echo "Verify VERSION from kallithea/__init__.py"
 namerel=$(cd dist && echo Kallithea-*.tar.gz)
@@ -49,7 +49,7 @@
 diff -u <((hg mani | grep -v '^\.hg\|^kallithea/i18n/en/LC_MESSAGES/kallithea.mo$') | LANG=C sort) <(tar tf dist/Kallithea-$version.tar.gz | sed "s|^$namerel/||" | grep . | grep -v '^kallithea/i18n/.*/LC_MESSAGES/kallithea.mo$\|^Kallithea.egg-info/\|^PKG-INFO$\|/$' | LANG=C sort)
 
 echo "Verify docs build"
-python2 setup.py build_sphinx # the results are not actually used, but we want to make sure it builds
+python3 setup.py build_sphinx # the results are not actually used, but we want to make sure it builds
 
 echo "Shortlog for inclusion in the release announcement"
 scripts/shortlog.py "only('.', branch('stable') & tagged() & public() & not '.')"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/pyflakes	Sat May 02 21:20:43 2020 +0200
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+"""
+pyflakes with filter configuration for Kallithea.
+Inspired by pyflakes/api.py and flake8/plugins/pyflakes.py .
+"""
+
+import sys
+
+import pyflakes.api
+import pyflakes.messages
+
+
+class Reporter:
+
+    warned = False
+
+    def flake(self, warning):
+        # ignore known warnings
+        if isinstance(warning, pyflakes.messages.UnusedVariable):
+            return
+        if warning.filename == 'kallithea/bin/kallithea_cli_ishell.py':
+            if isinstance(warning, pyflakes.messages.ImportStarUsed) and warning.message_args == ('kallithea.model.db',):
+                return
+            if isinstance(warning, pyflakes.messages.UnusedImport) and warning.message_args == ('kallithea.model.db.*',):
+                return
+
+        print('%s:%s %s   [%s %s]' % (warning.filename, warning.lineno, warning.message % warning.message_args, type(warning).__name__, warning.message_args))
+        self.warned = True
+
+    def unexpectedError(self, filename, msg):
+        print('Unexpected error for %s: %s' % (filename, msg))
+
+
+reporter = Reporter()
+
+for filename in sorted(set(sys.argv[1:])):
+    pyflakes.api.checkPath(filename, reporter=reporter)
+if reporter.warned:
+    raise SystemExit(1)
--- a/scripts/run-all-cleanup	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/run-all-cleanup	Sat May 02 21:20:43 2020 +0200
@@ -8,3 +8,6 @@
 scripts/docs-headings.py
 scripts/generate-ini.py
 scripts/whitespacecleanup.sh
+
+hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs scripts/pyflakes
+echo "no blocking problems found by $0"
--- a/scripts/shortlog.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/shortlog.py	Sat May 02 21:20:43 2020 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
 """
--- a/scripts/update-copyrights.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/update-copyrights.py	Sat May 02 21:20:43 2020 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
 """
@@ -51,11 +51,14 @@
     * first contribution
     * number of contribution years
     * name (with some unicode normalization)
-    The entries must be 2-tuples of a list of string years and the unicode name"""
-    return (x[0] and -int(x[0][-1]),
-            x[0] and int(x[0][0]),
-            -len(x[0]),
-            x[1].decode('utf-8').lower().replace(u'\xe9', u'e').replace(u'\u0142', u'l')
+    The entries must be 2-tuples of a list of string years and the name"""
+    years, name = x
+    if not years:
+        years = ['0']
+    return (-int(years[-1]),  # primarily sort by latest contribution
+            int(years[0]),  # then sort by first contribution
+            -len(years),  # then sort by length of contribution (no gaps)
+            name.lower().replace('\xe9', 'e').replace('\u0142', 'l')  # finally sort by name
         )
 
 
@@ -134,7 +137,7 @@
         all_entries=repo_entries + contributor_data.other_about + contributor_data.other,
         no_entries=contributor_data.no_about,
         domain_extra=contributor_data.domain_extra,
-        split_re=r'(?:  <li>Copyright &copy; [^\n]*</li>\n)*',
+        split_re=r'(?:  <li>Copyright &copy; [^\n]+</li>\n)+',
         normalize_name=lambda name: name.split('<', 1)[0].strip(),
         format_f=lambda years, name: '  <li>Copyright &copy; %s, %s</li>\n' % (nice_years(years, '&ndash;', ', '), name),
         )
@@ -144,7 +147,7 @@
         all_entries=repo_entries + contributor_data.other_contributors + contributor_data.other,
         no_entries=contributor_data.total_ignore,
         domain_extra=contributor_data.domain_extra,
-        split_re=r'(?:    [^\n]*\n)*',
+        split_re=r'(?:    [^\n]+\n)+',
         normalize_name=lambda name: name,
         format_f=lambda years, name: ('    %s%s%s\n' % (name, ' ' if years else '', nice_years(years))),
         )
@@ -154,7 +157,7 @@
         all_entries=repo_entries,
         no_entries=contributor_data.total_ignore,
         domain_extra={},
-        split_re=r'(?<=&copy;) .* (?=by various authors)',
+        split_re=r'(?<=&copy;) .+ (?=by various authors)',
         normalize_name=lambda name: '',
         format_f=lambda years, name: ' ' + nice_years(years, '&ndash;', ', ') + ' ',
         )
@@ -165,7 +168,7 @@
         all_entries=repo_entries,
         no_entries=contributor_data.total_ignore,
         domain_extra={},
-        split_re=r"(?<=copyright = u').*(?= by various authors)",
+        split_re=r"(?<=copyright = ').+(?= by various authors)",
         normalize_name=lambda name: '',
         format_f=lambda years, name: nice_years(years, '-', ', '),
         )
--- a/scripts/validate-commits	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/validate-commits	Sat May 02 21:20:43 2020 +0200
@@ -34,20 +34,24 @@
     hg update "$rev"
 
     cleanup
-    virtualenv -p "$(command -v python2)" "$venv"
+    python3 -m venv "$venv"
     source "$venv/bin/activate"
     pip install --upgrade pip setuptools
     pip install -e . -r dev_requirements.txt python-ldap python-pam
 
     # run-all-cleanup
-    scripts/run-all-cleanup
-    if ! hg update --check -q .; then
-        echo "run-all-cleanup did not give clean results!"
+    if ! scripts/run-all-cleanup ; then
+        echo "run-all-cleanup encountered errors!"
         result="NOK"
-        hg diff
-        hg revert -a
     else
-        result=" OK"
+        if ! hg update --check -q .; then
+            echo "run-all-cleanup did not give clean results!"
+            result="NOK"
+            hg diff
+            hg revert -a
+        else
+            result=" OK"
+        fi
     fi
     echo "$result: $rev (run-all-cleanup)" >> "$resultfile"
 
--- a/scripts/validate-minimum-dependency-versions	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/validate-minimum-dependency-versions	Sat May 02 21:20:43 2020 +0200
@@ -28,14 +28,11 @@
 sed -n 's/.*"\(.*\)>=\(.*\)".*/\1==\2/p' setup.py > "$min_requirements"
 sed 's/>=/==/p' dev_requirements.txt >> "$min_requirements"
 
-virtualenv -p "$(command -v python2)" "$venv"
+python3 -m venv "$venv"
 source "$venv/bin/activate"
 pip install --upgrade pip setuptools
 pip install -e . -r "$min_requirements" python-ldap python-pam 2> >(tee "$log" >&2)
 
-# Strip out the known Python 2.7 deprecation message.
-sed -i '/DEPRECATION: Python 2\.7 /d' "$log"
-
 # Treat any message on stderr as a problem, for the caller to interpret.
 if [ -s "$log" ]; then
     echo
--- a/scripts/whitespacecleanup.sh	Thu Apr 09 18:03:56 2020 +0200
+++ b/scripts/whitespacecleanup.sh	Sat May 02 21:20:43 2020 +0200
@@ -1,4 +1,4 @@
-#!/bin/bash -x
+#!/bin/bash -xe
 
 # Enforce some consistency in whitespace - just to avoid spurious whitespaces changes
 
@@ -18,6 +18,7 @@
 hg loc 'set:!binary()&grep("^#!")&!(**_tmpl.py)&!(**/template**)' | xargs chmod +x
 
 # isort is installed from dev_requirements.txt
-isort --line-width 160 --wrap-length 160 --lines-after-imports 2 `hg loc '*.py'`
+hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs isort --line-width 160 --lines-after-imports 2
 
+echo "diff after $0:"
 hg diff
--- a/setup.py	Thu Apr 09 18:03:56 2020 +0200
+++ b/setup.py	Sat May 02 21:20:43 2020 +0200
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import platform
@@ -9,8 +9,8 @@
 from setuptools.command import sdist
 
 
-if sys.version_info < (2, 6) or sys.version_info >= (3,):
-    raise Exception('Kallithea requires python 2.7')
+if sys.version_info < (3, 6):
+    raise Exception('Kallithea requires Python 3.6 or later')
 
 
 here = os.path.abspath(os.path.dirname(__file__))
@@ -20,16 +20,17 @@
     import re
     matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
     if matches:
-        if not callable(callback_handler):
-            callback_handler = lambda v: v
+        s = eval(matches.groups()[0])
+        if callable(callback_handler):
+            return callback_handler(s)
+        return s
 
-        return callback_handler(eval(matches.groups()[0]))
-
-_meta = open(os.path.join(here, 'kallithea', '__init__.py'), 'rb')
+_meta = open(os.path.join(here, 'kallithea', '__init__.py'), 'r')
 _metadata = _meta.read()
 _meta.close()
 
-callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
+def callback(V):
+    return '.'.join(map(str, V[:3])) + '.'.join(V[3:])
 __version__ = _get_meta_var('VERSION', _metadata, callback)
 __license__ = _get_meta_var('__license__', _metadata)
 __author__ = _get_meta_var('__author__', _metadata)
@@ -40,40 +41,40 @@
 is_windows = __platform__ in ['Windows']
 
 requirements = [
-    "alembic >= 0.8.0, < 1.1",
+    "alembic >= 1.0.10, < 1.5",
     "gearbox >= 0.1.0, < 1",
-    "waitress >= 0.8.8, < 1.4",
-    "WebOb >= 1.7, < 1.9",
+    "waitress >= 0.8.8, < 1.5",
+    "WebOb >= 1.8, < 1.9",
     "backlash >= 0.1.2, < 1",
-    "TurboGears2 >= 2.3.10, < 2.5",
+    "TurboGears2 >= 2.4, < 2.5",
     "tgext.routes >= 0.2.0, < 1",
-    "Beaker >= 1.7.0, < 2",
-    "WebHelpers >= 1.3, < 1.4",
+    "Beaker >= 1.10.1, < 2",
     "WebHelpers2 >= 2.0, < 2.1",
-    "FormEncode >= 1.3.0, < 1.4",
-    "SQLAlchemy >= 1.1, < 1.4",
-    "Mako >= 0.9.0, < 1.1",
-    "Pygments >= 2.2.0, < 2.5",
-    "Whoosh >= 2.5.0, < 2.8",
-    "celery >= 3.1, < 4.0", # TODO: celery 4 doesn't work
-    "Babel >= 1.3, < 2.8",
-    "python-dateutil >= 1.5.0, < 2.9",
+    "FormEncode >= 1.3.1, < 1.4",
+    "SQLAlchemy >= 1.2.9, < 1.4",
+    "Mako >= 0.9.1, < 1.2",
+    "Pygments >= 2.2.0, < 2.6",
+    "Whoosh >= 2.7.1, < 2.8",
+    "celery >= 4.3, < 4.5",
+    "Babel >= 1.3, < 2.9",
+    "python-dateutil >= 2.1.0, < 2.9",
     "Markdown >= 2.2.1, < 3.2",
-    "docutils >= 0.11, < 0.15",
+    "docutils >= 0.11, < 0.17",
     "URLObject >= 2.3.4, < 2.5",
-    "Routes >= 1.13, < 2", # TODO: bumping to 2.0 will make test_file_annotation fail
-    "dulwich >= 0.14.1, < 0.20",
-    "mercurial >= 4.5, < 5.3",
-    "decorator >= 3.3.2, < 4.5",
-    "Paste >= 2.0.3, < 3.1",
-    "bleach >= 3.0, < 3.2",
+    "Routes >= 2.0, < 2.5",
+    "dulwich >= 0.19.0, < 0.20",
+    "mercurial >= 5.2, < 5.5",
+    "decorator >= 4.2.1, < 4.5",
+    "Paste >= 2.0.3, < 3.4",
+    "bleach >= 3.0, < 3.1.4",
     "Click >= 7.0, < 8",
-    "ipaddr >= 2.1.10, < 2.3",
+    "ipaddr >= 2.2.0, < 2.3",
+    "paginate >= 0.5, < 0.6",
+    "paginate_sqlalchemy >= 0.3.0, < 0.4",
+    "bcrypt >= 3.1.0, < 3.2",
+    "pip >= 20.0, < 999",
 ]
 
-if not is_windows:
-    requirements.append("bcrypt >= 3.1.0, < 3.2")
-
 dependency_links = [
 ]
 
@@ -84,8 +85,9 @@
     'Intended Audience :: Developers',
     'License :: OSI Approved :: GNU General Public License (GPL)',
     'Operating System :: OS Independent',
-    'Programming Language :: Python',
-    'Programming Language :: Python :: 2.7',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
     'Topic :: Software Development :: Version Control',
 ]
 
@@ -110,8 +112,8 @@
     long_description = open(README_FILE).read()
 except IOError as err:
     sys.stderr.write(
-        "[WARNING] Cannot find file specified as long_description (%s)\n"
-        % README_FILE
+        "[WARNING] Cannot find file specified as long_description (%s): %s\n"
+        % (README_FILE, err)
     )
     long_description = description