changeset 8895:747cc853b5e9 stable

release: merge default to stable for 0.7.0
author Thomas De Schampheleire <thomas.de_schampheleire@nokia.com>
date Thu, 27 May 2021 21:27:37 +0200
parents 8bad7d298171 cf4c300fe036
children a8b59e65b98f
diffstat 370 files changed, 12406 insertions(+), 13930 deletions(-)
--- a/.coveragerc	Sun May 09 08:42:17 2021 +0200
+++ b/.coveragerc	Thu May 27 21:27:37 2021 +0200
@@ -8,9 +8,6 @@
     kallithea/lib/dbmigrate/*
     # the tests themselves should not be part of the coverage report
     kallithea/tests/*
-    # the scm hooks are not run in the kallithea process
-    kallithea/config/post_receive_tmpl.py
-    kallithea/config/pre_receive_tmpl.py
 
 # same omit lines should be present in sections 'run' and 'report'
 [report]
@@ -23,9 +20,6 @@
     kallithea/lib/dbmigrate/*
     # the tests themselves should not be part of the coverage report
     kallithea/tests/*
-    # the scm hooks are not run in the kallithea process
-    kallithea/config/post_receive_tmpl.py
-    kallithea/config/pre_receive_tmpl.py
 
 [paths]
 source =
--- a/.hgignore	Sun May 09 08:42:17 2021 +0200
+++ b/.hgignore	Thu May 27 21:27:37 2021 +0200
@@ -10,16 +10,15 @@
 *.rej
 *.bak
 .eggs/
-tarballcache/
 
 syntax: regexp
-^rcextensions
-^build
-^dist/
-^docs/build/
-^docs/_build/
+^extensions\.py$
+^build$
+^dist$
+^docs/build$
+^docs/_build$
 ^data$
-^sql_dumps/
+^sql_dumps$
 ^\.settings$
 ^\.project$
 ^\.pydevproject$
@@ -48,8 +47,13 @@
 ^test\.db$
 ^Kallithea\.egg-info$
 ^my\.ini$
-^fabfile.py
+^fabfile\.py$
 ^\.idea$
 ^\.cache$
 ^\.pytest_cache$
+^venv$
 /__pycache__$
+^deps\.dot$
+^deps\.svg$
+^deps\.txt$
+^\.pytype/
--- a/CONTRIBUTORS	Sun May 09 08:42:17 2021 +0200
+++ b/CONTRIBUTORS	Thu May 27 21:27:37 2021 +0200
@@ -1,19 +1,26 @@
 List of contributors to Kallithea project:
 
-    Thomas De Schampheleire <thomas.de_schampheleire@nokia.com> 2014-2020
-    Mads Kiilerich <mads@kiilerich.com> 2016-2020
+    Thomas De Schampheleire <thomas.de_schampheleire@nokia.com> 2014-2021
+    Mads Kiilerich <mads@kiilerich.com> 2016-2021
+    ssantos <ssantos@web.de> 2018-2021
+    Private <adamantine.sword@gmail.com> 2019-2021
+    Étienne Gilli <etienne@gilli.io> 2020-2021
+    fresh <fresh190@protonmail.com> 2020-2021
+    robertus <robertuss12@gmail.com> 2020-2021
+    Eugenia Russell <eugenia.russell2019@gmail.com> 2021
+    Michalis <michalisntovas@yahoo.gr> 2021
+    vs <vsuhachev@yandex.ru> 2021
+    Александр <akonn7@mail.ru> 2021
     Asterios Dimitriou <steve@pci.gr> 2016-2017 2020
     Allan Nordhøy <epost@anotheragency.no> 2017-2020
     Anton Schur <tonich.sh@gmail.com> 2017 2020
-    ssantos <ssantos@web.de> 2018-2020
     Manuel Jacob <me@manueljacob.de> 2019-2020
-    Private <adamantine.sword@gmail.com> 2019-2020
+    Artem <kovalevartem.ru@gmail.com> 2020
     David Ignjić <ignjic@gmail.com> 2020
     Dennis Fink <dennis.fink@c3l.lu> 2020
-    Étienne Gilli <etienne@gilli.io> 2020
     J. Lavoie <j.lavoie@net-c.ca> 2020
-    robertus <robertuss12@gmail.com> 2020
     Ross Thomas <ross@lns-nevasoft.com> 2020
+    Tim Ooms <tatankat@users.noreply.github.com> 2020
     Andrej Shadura <andrew@shadura.me> 2012 2014-2017 2019
     Étienne Gilli <etienne.gilli@gmail.com> 2015-2017 2019
     Adi Kriegisch <adi@cg.tuwien.ac.at> 2019
--- a/MANIFEST.in	Sun May 09 08:42:17 2021 +0200
+++ b/MANIFEST.in	Thu May 27 21:27:37 2021 +0200
@@ -18,7 +18,6 @@
 recursive-include init.d *
 recursive-include kallithea/alembic *
 include           kallithea/bin/ldap_sync.conf
-include           kallithea/lib/paster_commands/template.ini.mako
 recursive-include kallithea/front-end *
 recursive-include kallithea/i18n *
 recursive-include kallithea/public *
--- a/README.rst	Sun May 09 08:42:17 2021 +0200
+++ b/README.rst	Thu May 27 21:27:37 2021 +0200
@@ -74,8 +74,8 @@
   web interface using simple editor or upload binary files using simple form.
 - Powerful pull request driven review system with inline commenting, changeset
   statuses, and notification system.
-- Importing and syncing repositories from remote locations for Git_, Mercurial_
-  and Subversion.
+- Importing and syncing repositories from remote locations for Git_ and
+  Mercurial_.
 - Mako templates let you customize the look and feel of the application.
 - Beautiful diffs, annotations and source code browsing all colored by
   pygments. Raw diffs are made in Git-diff format for both VCS systems,
@@ -175,7 +175,6 @@
 .. _Mercurial: http://mercurial.selenic.com/
 .. _Bitbucket: http://bitbucket.org/
 .. _GitHub: http://github.com/
-.. _Subversion: http://subversion.tigris.org/
 .. _Git: http://git-scm.com/
 .. _Celery: http://celeryproject.org/
 .. _Software Freedom Conservancy: http://sfconservancy.org/
--- a/dev_requirements.txt	Sun May 09 08:42:17 2021 +0200
+++ b/dev_requirements.txt	Thu May 27 21:27:37 2021 +0200
@@ -1,9 +1,9 @@
-pytest >= 4.6.6, < 5.4
+pytest >= 4.6.6, < 5.5
 pytest-sugar >= 0.9.2, < 0.10
 pytest-benchmark >= 3.2.2, < 3.3
 pytest-localserver >= 0.5.0, < 0.6
 mock >= 3.0.0, < 4.1
-Sphinx >= 1.8.0, < 2.4
+Sphinx >= 1.8.0, < 3.1
 WebTest >= 2.0.6, < 2.1
-isort == 4.3.21
-pyflakes == 2.1.1
+isort == 5.1.2
+pyflakes == 2.2.0
--- a/development.ini	Sun May 09 08:42:17 2021 +0200
+++ b/development.ini	Thu May 27 21:27:37 2021 +0200
@@ -67,11 +67,11 @@
 host = 0.0.0.0
 port = 5000
 
-## WAITRESS ##
+## Gearbox serve uses the Waitress web server ##
 use = egg:waitress#main
-## number of worker threads
+## avoid multi threading
 threads = 1
-## MAX BODY SIZE 100GB
+## allow push of repos bigger than the default of 1 GB
 max_request_body_size = 107374182400
 ## use poll instead of select, fixes fd limits, may not work on old
 ## windows systems.
@@ -81,6 +81,7 @@
 #[filter:proxy-prefix]
 #use = egg:PasteDeploy#prefix
 #prefix = /<your-prefix>
+#translate_forwarded_server = False
 
 [app:main]
 use = egg:kallithea
@@ -102,7 +103,7 @@
 index_dir = %(here)s/data/index
 
 ## uncomment and set this path to use archive download cache
-archive_cache_dir = %(here)s/tarballcache
+archive_cache_dir = %(here)s/data/tarballcache
 
 ## change this to unique ID for security
 #app_instance_uuid = VERY-SECRET
@@ -111,11 +112,17 @@
 ## cut off limit for large diffs (size in bytes)
 cut_off_limit = 256000
 
-## force https in Kallithea, fixes https redirects, assumes it's always https
-force_https = false
+## WSGI environment variable to get the IP address of the client (default REMOTE_ADDR)
+#remote_addr_variable = HTTP_X_FORWARDED_FOR
+
+## WSGI environment variable to get the protocol (http or https) of the client connection (default wsgi.url_scheme)
+#url_scheme_variable = HTTP_X_FORWARDED_PROTO
 
-## use Strict-Transport-Security headers
-use_htsts = false
+## always pretend the client connected using HTTPS (default false)
+#force_https = true
+
+## use Strict-Transport-Security headers (default false)
+#use_htsts = true
 
 ## number of commits stats will parse on each iteration
 commit_parse_limit = 25
@@ -259,15 +266,8 @@
 ## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':
 celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
 
-celery.result_backend = db+sqlite:///celery-results.db
-
-#celery.amqp.task.result.expires = 18000
-
 celery.worker_concurrency = 2
-celery.worker_max_tasks_per_child = 1
-
-## If true, tasks will never be sent to the queue, but executed locally instead.
-celery.task_always_eager = false
+celery.worker_max_tasks_per_child = 100
 
 ####################################
 ##          BEAKER CACHE          ##
@@ -346,7 +346,6 @@
 get trace_errors.smtp_password = smtp_password
 get trace_errors.smtp_use_tls = smtp_use_tls
 
-
 ##################################
 ##        LOGVIEW CONFIG        ##
 ##################################
@@ -359,10 +358,10 @@
 ##      DB CONFIG      ##
 #########################
 
-## SQLITE [default]
 sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
-
-## see sqlalchemy docs for other backends
+#sqlalchemy.url = postgresql://kallithea:password@localhost/kallithea
+#sqlalchemy.url = mysql://kallithea:password@localhost/kallithea?charset=utf8mb4
+## Note: the mysql:// prefix should also be used for MariaDB
 
 sqlalchemy.pool_recycle = 3600
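The new ``remote_addr_variable`` and ``url_scheme_variable`` options introduced above name keys in the WSGI environment. As a rough illustration only (not Kallithea code), a tiny WSGI middleware can show which keys a reverse proxy typically populates; by the standard WSGI/CGI convention, request headers such as ``X-Forwarded-For`` appear in the environment with an ``HTTP_`` prefix:

.. code-block:: python

    def log_proxy_environ(app):
        """Hypothetical WSGI middleware: print the environ keys Kallithea can be
        configured to read via remote_addr_variable / url_scheme_variable."""
        def middleware(environ, start_response):
            # Defaults used when the options are not set:
            print('REMOTE_ADDR            =', environ.get('REMOTE_ADDR'))
            print('wsgi.url_scheme        =', environ.get('wsgi.url_scheme'))
            # Header-based values a reverse proxy may add:
            print('HTTP_X_FORWARDED_FOR   =', environ.get('HTTP_X_FORWARDED_FOR'))
            print('HTTP_X_FORWARDED_PROTO =', environ.get('HTTP_X_FORWARDED_PROTO'))
            return app(environ, start_response)
        return middleware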
 
--- a/docs/conf.py	Sun May 09 08:42:17 2021 +0200
+++ b/docs/conf.py	Thu May 27 21:27:37 2021 +0200
@@ -14,7 +14,7 @@
 import os
 import sys
 
-from kallithea import __version__
+import kallithea
 
 
 # If extensions (or modules to document with autodoc) are in another directory,
@@ -47,7 +47,7 @@
 
 # General information about the project.
 project = 'Kallithea'
-copyright = '2010-2020 by various authors, licensed as GPLv3.'
+copyright = '2010-2021 by various authors, licensed as GPLv3.'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -56,9 +56,9 @@
 # The short X.Y version.
 root = os.path.dirname(os.path.dirname(__file__))
 sys.path.append(root)
-version = __version__
+version = kallithea.__version__
 # The full version, including alpha/beta/rc tags.
-release = __version__
+release = kallithea.__version__
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
--- a/docs/contributing.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/contributing.rst	Thu May 27 21:27:37 2021 +0200
@@ -26,12 +26,13 @@
 Getting started
 ---------------
 
-To get started with Kallithea development::
+To get started with Kallithea development, run the following commands in your
+bash shell::
 
         hg clone https://kallithea-scm.org/repos/kallithea
         cd kallithea
-        python3 -m venv ../kallithea-venv
-        source ../kallithea-venv/bin/activate
+        python3 -m venv venv
+        . venv/bin/activate
         pip install --upgrade pip setuptools
         pip install --upgrade -e . -r dev_requirements.txt python-ldap python-pam
         kallithea-cli config-create my.ini
@@ -71,6 +72,94 @@
 .. _contributing-tests:
 
 
+Internal dependencies
+---------------------
+
+We try to keep the code base clean and modular and avoid circular dependencies.
+Code should only invoke code in layers below itself.
+
+Imports should import whole modules ``from`` their parent module, perhaps
+``as`` a shortened name. Avoid imports ``from`` modules.
+
+To avoid cycles and partially initialized modules, ``__init__.py`` should *not*
+contain any non-trivial imports. The top level of a module should *not* be a
+facade for the module functionality.
+
+Common code for a module is often in ``base.py``.
+
+The important part of the dependency graph is approximately linear. In the
+following list, modules may only depend on modules below them:
+
+``tests``
+  Just get the job done - anything goes.
+
+``bin/`` & ``config/`` & ``alembic/``
+  The main entry points, defined in ``setup.py``. Note: The TurboGears template
+  use ``config`` for the high WSGI application - this is not for low level
+  configuration.
+
+``controllers/``
+  The top level web application, with TurboGears using the ``root`` controller
+  as entry point, and ``routing`` dispatching to other controllers.
+
+``templates/**.html``
+  The "view", rendering to HTML. Invoked by controllers which can pass them
+  anything from lower layers - especially ``helpers`` available as ``h`` will
+  cut through all layers, and ``c`` gives access to global variables.
+
+``lib/helpers.py``
+  High level helpers, exposing everything to templates as ``h``. It depends on
+  everything and has a huge dependency chain, so it should not be used for
+  anything else. TODO.
+
+``controllers/base.py``
+  The base class of controllers, with lots of model knowledge.
+
+``lib/auth.py``
+  All things related to authentication. TODO.
+
+``lib/utils.py``
+  High level utils with lots of model knowledge. TODO.
+
+``lib/hooks.py``
+  Hooks into "everything" to give centralized logging to database, cache
+  invalidation, and extension handling. TODO.
+
+``model/``
+  Convenience business logic wrappers around database models.
+
+``model/db.py``
+  Defines the database schema and provides some additional logic.
+
+``model/scm.py``
+  All things related to anything. TODO.
+
+SQLAlchemy
+  Database session and transaction in thread-local variables.
+
+``lib/utils2.py``
+  Low level utils specific to Kallithea.
+
+``lib/webutils.py``
+  Low level generic utils with awareness of the TurboGears environment.
+
+TurboGears
+  Request, response and state like i18n gettext in thread-local variables.
+  External dependency with global state - usage should be minimized.
+
+``lib/vcs/``
+  Previously an independent library. No awareness of web, database, or state.
+
+``lib/*``
+  Various "pure" functionality not depending on anything else.
+
+``__init__``
+  Very basic Kallithea constants - some of them are set very early based on ``.ini``.
+
+This is not exactly how it is right now, but we aim for something like that.
+Especially the areas marked as TODO have some problems that need untangling.
+
+
 Running tests
 -------------
 
@@ -84,6 +173,17 @@
 and the test suite creates repositories in the temporary directory. Linux
 systems with /tmp mounted noexec will thus fail.
 
+Tests can be run on PostgreSQL like::
+
+    sudo -u postgres createuser 'kallithea-test' --pwprompt  # password password
+    sudo -u postgres createdb 'kallithea-test' --owner 'kallithea-test'
+    REUSE_TEST_DB='postgresql://kallithea-test:password@localhost/kallithea-test' py.test
+
+Tests can be run on MariaDB/MySQL like::
+
+    echo "GRANT ALL PRIVILEGES ON \`kallithea-test\`.* TO 'kallithea-test'@'localhost' IDENTIFIED BY 'password'" | sudo -u mysql mysql
+    TEST_DB='mysql://kallithea-test:password@localhost/kallithea-test?charset=utf8mb4' py.test
+
 You can also use ``tox`` to run the tests with all supported Python versions.
 
 When running tests, Kallithea generates a `test.ini` based on template values
@@ -147,8 +247,9 @@
 lot about preservation of copyright and license information for existing code
 that is brought into the project.
 
-Contributions will be accepted in most formats -- such as commits hosted on your own Kallithea instance, or patches sent by
-email to the `kallithea-general`_ mailing list.
+Contributions will be accepted in most formats -- such as commits hosted on your
+own Kallithea instance, or patches sent by email to the `kallithea-general`_
+mailing list.
 
 Make sure to test your changes both manually and with the automatic tests
 before posting.
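The "Internal dependencies" section added above asks for importing whole modules ``from`` their parent package, optionally ``as`` a shortened name, rather than importing individual names. A minimal sketch of that style, using modules that appear elsewhere in this changeset:

.. code-block:: python

    # Preferred: import the whole module from its parent package, optionally
    # under a shortened name (e.g. "from kallithea.lib import helpers as h").
    from kallithea.model import db

    table_name = db.Ui.__tablename__  # qualified access keeps the origin visible

    # Avoided: importing individual names "from" a module, e.g.
    # "from kallithea.model.db import Ui" - the alembic scripts in this
    # changeset are updated away from exactly that pattern.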
--- a/docs/index.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/index.rst	Thu May 27 21:27:37 2021 +0200
@@ -81,7 +81,6 @@
 .. _python: http://www.python.org/
 .. _django: http://www.djangoproject.com/
 .. _mercurial: https://www.mercurial-scm.org/
-.. _subversion: http://subversion.tigris.org/
 .. _git: http://git-scm.com/
 .. _celery: http://celeryproject.org/
 .. _Sphinx: http://sphinx.pocoo.org/
--- a/docs/installation.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/installation.rst	Thu May 27 21:27:37 2021 +0200
@@ -19,12 +19,12 @@
   installations side by side or remove it entirely by just removing the
   virtualenv directory) and does not require root privileges.
 
-- :ref:`installation-without-virtualenv`: The alternative method of installing
-  a Kallithea release is using standard pip. The package will be installed in
-  the same location as all other Python packages you have ever installed. As a
-  result, removing it is not as straightforward as with a virtualenv, as you'd
-  have to remove its dependencies manually and make sure that they are not
-  needed by other packages.
+- Kallithea can also be installed with plain pip - globally or with ``--user``
+  or similar. The package will be installed in the same location as all other
+  Python packages you have ever installed. As a result, removing it is not as
+  straightforward as with a virtualenv, as you'd have to remove its
+  dependencies manually and make sure that they are not needed by other
+  packages. We recommend using virtualenv.
 
 Regardless of the installation method you may need to make sure you have
 appropriate development packages installed, as installation of some of the
@@ -49,17 +49,24 @@
 -----------------------------------
 
 To install Kallithea in a virtualenv using the stable branch of the development
-repository, follow the instructions below::
+repository, use the following commands in your bash shell::
 
         hg clone https://kallithea-scm.org/repos/kallithea -u stable
         cd kallithea
-        python3 -m venv ../kallithea-venv
-        . ../kallithea-venv/bin/activate
+        python3 -m venv venv
+        . venv/bin/activate
         pip install --upgrade pip setuptools
         pip install --upgrade -e .
         python3 setup.py compile_catalog   # for translation of the UI
 
-You can now proceed to :ref:`setup`.
+.. note::
+   This will install all Python dependencies into the virtualenv. Kallithea
+   itself will however only be installed as a pointer to the source location.
+   The source clone must thus be kept in the same location, and it shouldn't be
+   updated to other revisions unless you want to upgrade. Edits in the source
+   tree will have immediate impact (possibly after a restart of the service).
+
+You can now proceed to :ref:`prepare-front-end-files`.
 
 .. _installation-virtualenv:
 
@@ -73,27 +80,30 @@
 problematic when upgrading the system or Kallithea.
 An additional benefit of virtualenv is that it doesn't require root privileges.
 
-- Assuming you have installed virtualenv, create a new virtual environment
-  for example, in `/srv/kallithea/venv`, using the venv command::
+- Don't install as root - install as a dedicated user like ``kallithea``.
+  If necessary, create the top directory for the virtualenv (like
+  ``/srv/kallithea/venv``) as root and assign ownership to the user.
+
+  Make a parent folder for the virtualenv (and perhaps also Kallithea
+  configuration and data files) such as ``/srv/kallithea``. Create the
+  directory as root if necessary and grant ownership to the ``kallithea`` user.
+
+- Create a new virtual environment, for example in ``/srv/kallithea/venv``,
+  specifying the right Python binary::
 
     python3 -m venv /srv/kallithea/venv
 
 - Activate the virtualenv in your current shell session and make sure the
-  basic requirements are up-to-date by running::
+  basic requirements are up-to-date by running the following commands in your
+  bash shell::
 
     . /srv/kallithea/venv/bin/activate
     pip install --upgrade pip setuptools
 
-.. note:: You can't use UNIX ``sudo`` to source the ``virtualenv`` script; it
-   will "activate" a shell that terminates immediately. It is also perfectly
-   acceptable (and desirable) to create a virtualenv as a normal user.
+.. note:: You can't use UNIX ``sudo`` to source the ``activate`` script; it
+   will "activate" a shell that terminates immediately.
 
-- Make a folder for Kallithea data files, and configuration somewhere on the
-  filesystem. For example::
-
-    mkdir /srv/kallithea
-
-- Go into the created directory and run this command to install Kallithea::
+- Install Kallithea in the activated virtualenv::
 
     pip install --upgrade kallithea
 
@@ -105,31 +115,30 @@
    This might require installation of development packages using your
    distribution's package manager.
 
-  Alternatively, download a .tar.gz from http://pypi.python.org/pypi/Kallithea,
-  extract it and install from source by running::
+   Alternatively, download a .tar.gz from http://pypi.python.org/pypi/Kallithea,
+   extract it and install from source by running::
 
-    pip install --upgrade .
+     pip install --upgrade .
 
 - This will install Kallithea together with all other required
   Python libraries into the activated virtualenv.
 
-You can now proceed to :ref:`setup`.
+You can now proceed to :ref:`prepare-front-end-files`.
 
-.. _installation-without-virtualenv:
+.. _prepare-front-end-files:
 
 
-Installing a released version without virtualenv
-------------------------------------------------
-
-For installation without virtualenv, 'just' use::
-
-    pip install kallithea
+Prepare front-end files
+-----------------------
 
-Note that this method requires root privileges and will install packages
-globally without using the system's package manager.
+Finally, the front-end files with CSS and JavaScript must be prepared. This
+depends on having some commands available in the shell search path: ``npm``
+version 6 or later, and ``node.js`` (version 12 or later) available as
+``node``. The installation method for these dependencies varies between
+operating systems and distributions.
 
-To install as a regular user in ``~/.local``, you can use::
+Prepare the front-end by running::
 
-    pip install --user kallithea
+    kallithea-cli front-end-build
 
 You can now proceed to :ref:`setup`.
--- a/docs/overview.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/overview.rst	Thu May 27 21:27:37 2021 +0200
@@ -20,23 +20,27 @@
 2. **Install Kallithea software.**
     This makes the ``kallithea-cli`` command line tool available.
 
-3. **Create low level configuration file.**
+3. **Prepare front-end files.**
+    Some front-end files must be fetched or created using ``npm`` and ``node``
+    tooling so they can be served to the client as static files.
+
+4. **Create low level configuration file.**
     Use ``kallithea-cli config-create`` to create a ``.ini`` file with database
     connection info, mail server information, configuration for the specified
     web server, etc.
 
-4. **Populate the database.**
+5. **Populate the database.**
     Use ``kallithea-cli db-create`` with the ``.ini`` file to create the
     database schema and insert the most basic information: the location of the
     repository store and an initial local admin user.
 
-5. **Configure the web server.**
+6. **Configure the web server.**
     The web server must invoke the WSGI entrypoint for the Kallithea software
     using the ``.ini`` file (and thus the database). This makes the web
     application available so the local admin user can log in and tweak the
     configuration further.
 
-6. **Configure users.**
+7. **Configure users.**
     The initial admin user can create additional local users, or configure how
     users can be created and authenticated from other user directories.
 
@@ -44,6 +48,45 @@
 :ref:`setup` for details on these steps.
 
 
+File system location
+--------------------
+
+Kallithea can be installed in many different ways. The main parts are:
+
+- A location for the Kallithea software and its dependencies. This includes
+  the Python code, template files, and front-end code. After installation, this
+  will be read-only (except when upgrading).
+
+- A location for the ``.ini`` configuration file that tells the Kallithea
+  instance which database to use (and thus also the repository location).
+  After installation, this will be read-only (except when upgrading).
+
+- A location for various data files and caches for the Kallithea instance. This
+  is by default in a ``data`` directory next to the ``.ini`` file. This will
+  have to be writable by the running Kallithea service.
+
+- A database. The ``.ini`` file specifies which database to use. The database
+  will be a separate service and live elsewhere in the filesystem if using
+  PostgreSQL or MariaDB/MySQL. If using SQLite, it will by default live next to
+  the ``.ini`` file, as ``kallithea.db``.
+
+- A location for the repositories that are hosted by this Kallithea instance.
+  This will have to be writable by the running Kallithea service. The path to
+  this location will be configured in the database.
+
+For production setups, one recommendation is to use ``/srv/kallithea`` for the
+``.ini`` and ``data``, place the virtualenv in ``venv``, and use a Kallithea
+clone in ``kallithea``. Create a ``kallithea`` user, let it own
+``/srv/kallithea``, and run as that user when installing.
+
+For simple setups, it is fine to just use something like a ``kallithea`` user
+with home in ``/home/kallithea`` and place everything there.
+
+For experiments, it might be convenient to run everything as yourself and work
+inside a clone of Kallithea, with the ``.ini`` and SQLite database in the root
+of the clone, and a virtualenv in ``venv``.
+
+
 Python environment
 ------------------
 
@@ -177,7 +220,7 @@
   to get a configuration starting point for your choice of web server.
 
   (Gearbox will do like ``paste`` and use the WSGI application entry point
-  ``kallithea.config.middleware:make_app`` as specified in ``setup.py``.)
+  ``kallithea.config.application:make_app`` as specified in ``setup.py``.)
 
 - `Apache httpd`_ can serve WSGI applications directly using mod_wsgi_ and a
   simple Python file with the necessary configuration. This is a good option if
@@ -216,13 +259,13 @@
 .. _Python: http://www.python.org/
 .. _Gunicorn: http://gunicorn.org/
 .. _Gevent: http://www.gevent.org/
-.. _Waitress: http://waitress.readthedocs.org/en/latest/
-.. _Gearbox: http://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
+.. _Waitress: https://docs.pylonsproject.org/projects/waitress/
+.. _Gearbox: https://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
 .. _PyPI: https://pypi.python.org/pypi
 .. _Apache httpd: http://httpd.apache.org/
-.. _mod_wsgi: https://code.google.com/p/modwsgi/
+.. _mod_wsgi: https://modwsgi.readthedocs.io/
 .. _isapi-wsgi: https://github.com/hexdump42/isapi-wsgi
-.. _uWSGI: https://uwsgi-docs.readthedocs.org/en/latest/
+.. _uWSGI: https://uwsgi-docs.readthedocs.io/
 .. _nginx: http://nginx.org/en/
 .. _iis: http://en.wikipedia.org/wiki/Internet_Information_Services
 .. _pip: http://en.wikipedia.org/wiki/Pip_%28package_manager%29
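The overview above points at the WSGI entry point ``kallithea.config.application:make_app``. The Apache ``mod_wsgi`` dispatch script shown later in this changeset loads it through PasteDeploy; the same can be done standalone, as in this minimal sketch (the ``.ini`` path is a hypothetical example):

.. code-block:: python

    from logging.config import fileConfig

    from paste.deploy import loadapp

    ini = '/srv/kallithea/my.ini'  # hypothetical path to the Kallithea .ini file
    fileConfig(ini, {'__file__': ini, 'here': '/srv/kallithea'})
    # PasteDeploy resolves "use = egg:kallithea" in [app:main] to the
    # kallithea.config.application:make_app entry point declared in setup.py.
    application = loadapp('config:' + ini)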
--- a/docs/setup.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/setup.rst	Thu May 27 21:27:37 2021 +0200
@@ -5,35 +5,72 @@
 =====
 
 
-Setting up Kallithea
---------------------
+Setting up a Kallithea instance
+-------------------------------
+
+Some further details to the steps mentioned in the overview.
 
-First, you will need to create a Kallithea configuration file. Run the
-following command to do so::
+Create low level configuration file
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First, you will need to create a Kallithea configuration file. The
+configuration file is a ``.ini`` file that contains various low level settings
+for Kallithea, e.g. configuration of how to use database, web server, email,
+and logging.
 
-    kallithea-cli config-create my.ini
+Change to the desired directory (such as ``/srv/kallithea``) as the right user
+and run the following command to create the file ``my.ini`` in the current
+directory::
+
+    kallithea-cli config-create my.ini http_server=waitress
 
-This will create the file ``my.ini`` in the current directory. This
-configuration file contains the various settings for Kallithea, e.g.
-proxy port, email settings, usage of static files, cache, Celery
-settings, and logging. Extra settings can be specified like::
+To get a good starting point for your configuration, specify the http server
+you intend to use. It can be ``waitress``, ``gearbox``, ``gevent``,
+``gunicorn``, or ``uwsgi``. (Apache ``mod_wsgi`` will not use this
+configuration file, and it is fine to keep the default http_server configuration
+unused. ``mod_wsgi`` is configured using ``httpd.conf`` directives and a WSGI
+wrapper script.)
+
+Extra custom settings can be specified like::
 
     kallithea-cli config-create my.ini host=8.8.8.8 "[handler_console]" formatter=color_formatter
 
-Next, you need to create the databases used by Kallithea. It is recommended to
-use PostgreSQL or SQLite (default). If you choose a database other than the
-default, ensure you properly adjust the database URL in your ``my.ini``
-configuration file to use this other database. Kallithea currently supports
-PostgreSQL, SQLite and MariaDB/MySQL databases. Create the database by running
-the following command::
+Populate the database
+^^^^^^^^^^^^^^^^^^^^^
+
+Next, you need to create the databases used by Kallithea. Kallithea currently
+supports PostgreSQL, SQLite and MariaDB/MySQL databases. It is recommended to
+start out using SQLite (the default) and move to PostgreSQL if it becomes a
+bottleneck or to get a "proper" database. MariaDB/MySQL is also supported.
+
+For PostgreSQL, run ``pip install psycopg2`` to get the database driver. Make
+sure the PostgreSQL server is initialized and running. Make sure you have a
+database user with password authentication with permissions to create databases
+- for example by running::
+
+    sudo -u postgres createuser 'kallithea' --pwprompt --createdb
+
+For MariaDB/MySQL, run ``pip install mysqlclient`` to get the ``MySQLdb``
+database driver. Make sure the database server is initialized and running. Make
+sure you have a database user with password authentication with permissions to
+create the database - for example by running::
+
+    echo 'CREATE USER "kallithea"@"localhost" IDENTIFIED BY "password"' | sudo -u mysql mysql
+    echo 'GRANT ALL PRIVILEGES ON `kallithea`.* TO "kallithea"@"localhost"' | sudo -u mysql mysql
+
+Check and adjust ``sqlalchemy.url`` in your ``my.ini`` configuration file to use
+this database.
+
+Create the database, tables, and initial content by running the following
+command::
 
     kallithea-cli db-create -c my.ini
 
-This will prompt you for a "root" path. This "root" path is the location where
-Kallithea will store all of its repositories on the current machine. After
-entering this "root" path ``db-create`` will also prompt you for a username
-and password for the initial admin account which ``db-create`` sets
-up for you.
+This will first prompt you for a "root" path. This "root" path is the location
+where Kallithea will store all of its repositories on the current machine. This
+location must be writable by the running Kallithea application. Next,
+``db-create`` will prompt you for a username and password for the initial admin
+account it sets up for you.
 
 The ``db-create`` values can also be given on the command line.
 Example::
@@ -48,19 +85,20 @@
 location to its database.  (Note: make sure you specify the correct
 path to the root).
 
-.. note:: the given path for Mercurial_ repositories **must** be write
-          accessible for the application. It's very important since
-          the Kallithea web interface will work without write access,
-          but when trying to do a push it will fail with permission
-          denied errors unless it has write access.
+.. note:: It is also possible to use an existing database. For example,
+          when using PostgreSQL without granting general createdb privileges to
+          the PostgreSQL kallithea user, set ``sqlalchemy.url =
+          postgresql://kallithea:password@localhost/kallithea`` and create the
+          database like::
 
-Finally, the front-end files must be prepared. This requires ``npm`` version 6
-or later, which needs ``node.js`` (version 12 or later). Prepare the front-end
-by running::
+              sudo -u postgres createdb 'kallithea' --owner 'kallithea'
+              kallithea-cli db-create -c my.ini --reuse
 
-    kallithea-cli front-end-build
+Running
+^^^^^^^
 
-You are now ready to use Kallithea. To run it simply execute::
+You are now ready to use Kallithea. To run it using a gearbox web server,
+simply execute::
 
     gearbox serve -c my.ini
 
@@ -186,7 +224,7 @@
 
 Kallithea provides full text search of repositories using `Whoosh`__.
 
-.. __: https://whoosh.readthedocs.io/en/latest/
+.. __: https://whoosh.readthedocs.io/
 
 For an incremental index build, run::
 
@@ -300,15 +338,21 @@
 Hook management
 ---------------
 
-Hooks can be managed in similar way to that used in ``.hgrc`` files.
+Custom Mercurial hooks can be managed in a similar way to that used in ``.hgrc`` files.
 To manage hooks, choose *Admin > Settings > Hooks*.
 
-The built-in hooks cannot be modified, though they can be enabled or disabled in the *VCS* section.
-
 To add another custom hook simply fill in the first textbox with
 ``<name>.<hook_type>`` and the second with the hook path. Example hooks
 can be found in ``kallithea.lib.hooks``.
 
+Kallithea will also use some hooks internally. They cannot be modified, but
+some of them can be enabled or disabled in the *VCS* section.
+
+Kallithea does not actively support custom Git hooks, but hooks can be installed
+manually in the file system. Kallithea will install and use the
+``post-receive`` Git hook internally, but it will then invoke
+``post-receive-custom`` if present.
+
 
 Changing default encoding
 -------------------------
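The hook management section above takes ``<name>.<hook_type>`` and a hook value, much like an ``.hgrc`` ``[hooks]`` section would. As a rough sketch only (module and hook names invented; the signature is Mercurial's standard in-process Python hook interface, not a Kallithea API), a custom Mercurial hook could look like:

.. code-block:: python

    # myhooks.py - hypothetical module importable by the Kallithea process
    def log_changegroup(ui, repo, hooktype, node=None, **kwargs):
        """Log incoming changegroups; 'node' is the first new changeset."""
        ui.status(b'changegroup received in %s\n' % repo.root)
        return 0  # a falsy return value means the hook succeeded

    # Registered in *Admin > Settings > Hooks* as, for example:
    #   changegroup.mylogger    python:myhooks.log_changegroup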
@@ -362,6 +406,38 @@
    user that Kallithea runs.
 
 
+Proxy setups
+------------
+
+When Kallithea is processing HTTP requests from a user, it will see and use
+some of the basic properties of the connection, both at the TCP/IP level and at
+the HTTP level. The WSGI server will provide this information to Kallithea in
+the "environment".
+
+In some setups, a proxy server will take requests from users and forward
+them to the actual Kallithea server. The proxy server will thus be the
+immediate client of the Kallithea WSGI server, and Kallithea will basically see
+it as such. To make sure Kallithea sees the request as it arrived from the
+client to the proxy server, the proxy server must be configured to
+somehow pass the original information on to Kallithea, and Kallithea must be
+configured to pick that information up and trust it.
+
+Kallithea will by default rely on its WSGI server to provide the IP of the
+client in the WSGI environment as ``REMOTE_ADDR``, but it can be configured to
+get it from an HTTP header that has been set by the proxy server. For
+example, if the proxy server puts the client IP in the ``X-Forwarded-For``
+HTTP header, set::
+
+    remote_addr_variable = HTTP_X_FORWARDED_FOR
+
+Kallithea will by default rely on finding the protocol (``http`` or ``https``)
+in the WSGI environment as ``wsgi.url_scheme``. If the proxy server puts
+the protocol of the client request in the ``X-Forwarded-Proto`` HTTP header,
+Kallithea can be configured to trust that header by setting::
+
+    url_scheme_variable = HTTP_X_FORWARDED_PROTO
+
+
 HTTPS support
 -------------
 
@@ -370,10 +446,9 @@
 Alternatively, you can use some special configuration settings to control
 directly which scheme/protocol Kallithea will use when generating URLs:
 
-- With ``https_fixup = true``, the scheme will be taken from the
-  ``X-Url-Scheme``, ``X-Forwarded-Scheme`` or ``X-Forwarded-Proto`` HTTP header
-  (default ``http``).
-- With ``force_https = true`` the default will be ``https``.
+- With ``url_scheme_variable`` set, the scheme will be taken from that HTTP
+  header.
+- With ``force_https = true``, the scheme will be seen as ``https``.
 - With ``use_htsts = true``, Kallithea will set ``Strict-Transport-Security`` when using https.
 
 .. _nginx_virtual_host:
@@ -556,43 +631,19 @@
 
     WSGIRestrictEmbedded On
 
-- Create a WSGI dispatch script, like the one below. Make sure you
-  check that the paths correctly point to where you installed Kallithea
-  and its Python Virtual Environment.
+- Create a WSGI dispatch script, like the one below. The ``WSGIDaemonProcess``
+  ``python-home`` directive will make sure it uses the right Python Virtual
+  Environment, so that paste can pick up the right Kallithea application.
 
   .. code-block:: python
 
-      import os
-      os.environ['PYTHON_EGG_CACHE'] = '/srv/kallithea/.egg-cache'
-
-      # sometimes it's needed to set the current dir
-      os.chdir('/srv/kallithea/')
-
-      import site
-      site.addsitedir("/srv/kallithea/venv/lib/python3.7/site-packages")
-
       ini = '/srv/kallithea/my.ini'
       from logging.config import fileConfig
       fileConfig(ini, {'__file__': ini, 'here': '/srv/kallithea'})
       from paste.deploy import loadapp
       application = loadapp('config:' + ini)
 
-  Or using proper virtualenv activation:
-
-  .. code-block:: python
-
-      activate_this = '/srv/kallithea/venv/bin/activate_this.py'
-      execfile(activate_this, dict(__file__=activate_this))
-
-      import os
-      os.environ['HOME'] = '/srv/kallithea'
-
-      ini = '/srv/kallithea/kallithea.ini'
-      from logging.config import fileConfig
-      fileConfig(ini, {'__file__': ini, 'here': '/srv/kallithea'})
-      from paste.deploy import loadapp
-      application = loadapp('config:' + ini)
-
 - Add the necessary ``WSGI*`` directives to the Apache Virtual Host configuration
   file, like in the example below. Notice that the WSGI dispatch script created
   above is referred to with the ``WSGIScriptAlias`` directive.
@@ -617,15 +668,6 @@
       WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
       WSGIPassAuthorization On
 
-  Or if using a dispatcher WSGI script with proper virtualenv activation:
-
-  .. code-block:: apache
-
-      WSGIDaemonProcess kallithea processes=5 threads=1 maximum-requests=100 lang=en_US.utf8
-      WSGIProcessGroup kallithea
-      WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
-      WSGIPassAuthorization On
-
 
 Other configuration files
 -------------------------
--- a/docs/upgrade.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/upgrade.rst	Thu May 27 21:27:37 2021 +0200
@@ -39,8 +39,8 @@
 
 Make a copy of your Kallithea configuration (``.ini``) file.
 
-If you are using :ref:`rcextensions <customization>`, you should also
-make a copy of the entire ``rcextensions`` directory.
+If you are using custom :ref:`extensions <customization>`, you should also
+make a copy of the ``extensions.py`` file.
 
 Back up your database
 ^^^^^^^^^^^^^^^^^^^^^
@@ -225,14 +225,21 @@
     upgrade.
 
 
-10. Update Git repository hooks
--------------------------------
+10. Reinstall internal Git repository hooks
+-------------------------------------------
 
 It is possible that an upgrade involves changes to the Git hooks installed by
 Kallithea. As these hooks are created inside the repositories on the server
 filesystem, they are not updated automatically when upgrading Kallithea itself.
 
-To update the hooks of your Git repositories:
+To update the hooks of your Git repositories, run::
+
+    kallithea-cli repo-scan -c my.ini --install-git-hooks
+
+If you see warnings like ``skipping overwriting hook file X``, fix the cause
+and rerun, or consider using ``--overwrite-git-hooks`` instead.
+
+Or:
 
 * Go to *Admin > Settings > Remap and Rescan*
 * Select the checkbox *Install Git hooks*
--- a/docs/usage/customization.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/usage/customization.rst	Thu May 27 21:27:37 2021 +0200
@@ -39,13 +39,14 @@
 .. _less: http://lesscss.org/
 
 
-Behavioral customization: rcextensions
---------------------------------------
+Behavioral customization: Kallithea extensions
+----------------------------------------------
 
-Some behavioral customization can be done in Python using ``rcextensions``, a
-custom Python package that can extend Kallithea functionality.
+Some behavioral customization can be done in Python using Kallithea
+``extensions``, a custom Python file you can create to extend Kallithea
+functionality.
 
-With ``rcextensions`` it's possible to add additional mappings for Whoosh
+With ``extensions`` it's possible to add additional mappings for Whoosh
 indexing and statistics, to add additional code into the push/pull/create/delete
 repository hooks (for example to send signals to build bots such as Jenkins) and
 even to monkey-patch certain parts of the Kallithea source code (for example
@@ -55,9 +56,14 @@
 
     kallithea-cli extensions-create -c my.ini
 
-This will create an ``rcextensions`` package next to the specified ``ini`` file.
-See the ``__init__.py`` file inside the generated ``rcextensions`` package
-for more details.
+This will create an ``extensions.py`` file next to the specified ``ini`` file.
+You can find more details inside this file.
+
+For compatibility with previous releases of Kallithea, a directory named
+``rcextensions`` with a file ``__init__.py`` inside of it can also be used. If
+both an ``extensions.py`` file and an ``rcextensions`` directory are found, only
+``extensions.py`` will be loaded. Note that the name ``rcextensions`` is
+deprecated and support for it will be removed in a future release.
 
 
 Behavioral customization: code changes
--- a/docs/usage/email.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/usage/email.rst	Thu May 27 21:27:37 2021 +0200
@@ -89,8 +89,8 @@
 References
 ----------
 
-- `Error Middleware (Pylons documentation) <http://pylons-webframework.readthedocs.org/en/latest/debugging.html#error-middleware>`_
-- `ErrorHandler (Pylons modules documentation) <http://pylons-webframework.readthedocs.org/en/latest/modules/middleware.html#pylons.middleware.ErrorHandler>`_
+- `Error Middleware (Pylons documentation) <https://pylons-webframework.readthedocs.io/en/latest/debugging.html#error-middleware>`_
+- `ErrorHandler (Pylons modules documentation) <https://pylons-webframework.readthedocs.io/en/latest/modules/middleware.html#pylons.middleware.ErrorHandler>`_
 
 
 .. _backlash: https://github.com/TurboGears/backlash
--- a/docs/usage/general.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/usage/general.rst	Thu May 27 21:27:37 2021 +0200
@@ -118,22 +118,15 @@
 
 Trending source files are calculated based on a predefined dictionary of known
 types and extensions. If an extension is missing or you would like to scan
-custom files, it is possible to extend the ``LANGUAGES_EXTENSIONS_MAP``
-dictionary located in ``kallithea/config/conf.py`` with new types.
+custom files, it is possible to add additional file extensions with
+``EXTRA_MAPPINGS`` in your custom Kallithea extensions.py file. See
+:ref:`customization`.
 
 
 Cloning remote repositories
 ---------------------------
 
 Kallithea has the ability to clone repositories from given remote locations.
-Currently it supports the following options:
-
-- hg  -> hg clone
-- svn -> hg clone
-- git -> git clone
-
-.. note:: svn -> hg cloning requires the ``hgsubversion`` library to be
-   installed.
 
 If you need to clone repositories that are protected via basic authentication,
 you can pass the credentials in the URL, e.g.
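The trending-files paragraph above mentions ``EXTRA_MAPPINGS`` in the custom ``extensions.py`` file. The generated file documents the exact format; as an assumption based on the historical ``rcextensions`` template, it maps a file extension to a list of language names, roughly like:

.. code-block:: python

    # Hypothetical entry in the extensions.py generated by
    # "kallithea-cli extensions-create" - the generated file documents the
    # authoritative format. Historically (rcextensions) the format was
    # {'extension': ['Language name', ...]}, for example:
    EXTRA_MAPPINGS = {
        'pyx': ['Python'],  # assumption: count Cython .pyx files as Python
    }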
--- a/docs/usage/performance.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/usage/performance.rst	Thu May 27 21:27:37 2021 +0200
@@ -48,42 +48,37 @@
 Horizontal scaling
 ------------------
 
-Scaling horizontally means running several Kallithea instances and let them
-share the load. That can give huge performance benefits when dealing with large
-amounts of traffic (many users, CI servers, etc.). Kallithea can be scaled
-horizontally on one (recommended) or multiple machines.
+Scaling horizontally means running several Kallithea instances (also known as
+worker processes) and letting them share the load. That is essential to serve
+other users while processing a long-running request from a user. Usually, the
+bottleneck on a Kallithea server is not CPU but I/O speed - especially network
+speed. It is thus a good idea to run multiple worker processes on one server.
 
-It is generally possible to run WSGI applications multithreaded, so that
-several HTTP requests are served from the same Python process at once. That can
-in principle give better utilization of internal caches and less process
-overhead.
+.. note::
 
-One danger of running multithreaded is that program execution becomes much more
-complex; programs must be written to consider all combinations of events and
-problems might depend on timing and be impossible to reproduce.
+    Kallithea and the embedded Mercurial backend are not thread-safe. Each
+    worker process must thus be single-threaded.
 
-Kallithea can't promise to be thread-safe, just like the embedded Mercurial
-backend doesn't make any strong promises when used as Kallithea uses it.
-Instead, we recommend scaling by using multiple server processes.
+Web servers can usually launch multiple worker processes - for example ``mod_wsgi`` with the
+``WSGIDaemonProcess`` ``processes`` parameter or ``uWSGI`` or ``gunicorn`` with
+their ``workers`` setting.
 
-Web servers with multiple worker processes (such as ``mod_wsgi`` with the
-``WSGIDaemonProcess`` ``processes`` parameter) will work out of the box.
-
+Kallithea can also be scaled horizontally across multiple machines.
 In order to scale horizontally on multiple machines, you need to do the
 following:
 
-    - Each instance's ``data`` storage needs to be configured to be stored on a
-      shared disk storage, preferably together with repositories. This ``data``
-      dir contains template caches, sessions, whoosh index and is used for
-      task locking (so it is safe across multiple instances). Set the
-      ``cache_dir``, ``index_dir``, ``beaker.cache.data_dir``, ``beaker.cache.lock_dir``
-      variables in each .ini file to a shared location across Kallithea instances
-    - If using several Celery instances,
-      the message broker should be common to all of them (e.g.,  one
-      shared RabbitMQ server)
-    - Load balance using round robin or IP hash, recommended is writing LB rules
-      that will separate regular user traffic from automated processes like CI
-      servers or build bots.
+- Each instance's ``data`` storage needs to be configured to be stored on a
+  shared disk storage, preferably together with repositories. This ``data``
+  dir contains template caches, sessions, whoosh index and is used for
+  task locking (so it is safe across multiple instances). Set the
+  ``cache_dir``, ``index_dir``, ``beaker.cache.data_dir``, ``beaker.cache.lock_dir``
+  variables in each .ini file to a shared location across Kallithea instances
+- If using several Celery instances,
+  the message broker should be common to all of them (e.g.,  one
+  shared RabbitMQ server)
+- Load balance using round robin or IP hash, recommended is writing LB rules
+  that will separate regular user traffic from automated processes like CI
+  servers or build bots.
 
 
 Serve static files directly from the web server
@@ -125,3 +120,6 @@
 
 
 .. _SQLAlchemyGrate: https://github.com/shazow/sqlalchemygrate
+.. _mod_wsgi: https://modwsgi.readthedocs.io/
+.. _uWSGI: https://uwsgi-docs.readthedocs.io/
+.. _gunicorn: http://pypi.python.org/pypi/gunicorn
--- a/docs/usage/troubleshooting.rst	Sun May 09 08:42:17 2021 +0200
+++ b/docs/usage/troubleshooting.rst	Thu May 27 21:27:37 2021 +0200
@@ -43,12 +43,19 @@
 |
 
 :Q: **How can I use hooks in Kallithea?**
-:A: It's easy if they are Python hooks: just use advanced link in
-    hooks section in Admin panel, that works only for Mercurial. If
-    you want to use Git hooks, just install th proper one in the repository,
-    e.g., create a file `/gitrepo/hooks/pre-receive`. You can also use
-    Kallithea-extensions to connect to callback hooks, for both Git
-    and Mercurial.
+:A: If using Mercurial, use *Admin > Settings > Hooks* to install
+    global hooks. Inside the hooks, you can use the current working directory to
+    control different behaviour for different repositories.
+
+    If using Git, install the hooks manually in each repository, for example by
+    creating a file ``gitrepo/hooks/pre-receive``.
+    Note that Kallithea uses the ``post-receive`` hook internally.
+    Kallithea will not work properly if another post-receive hook is installed instead.
+    You might also accidentally overwrite your own post-receive hook with the Kallithea hook.
+    Instead, put your post-receive hook in ``post-receive-custom``, and the Kallithea hook will invoke it.
+
+    You can also use Kallithea-extensions to connect to callback hooks,
+    for both Git and Mercurial.
 
 |
 
--- a/init.d/kallithea-daemon-debian	Sun May 09 08:42:17 2021 +0200
+++ b/init.d/kallithea-daemon-debian	Thu May 27 21:27:37 2021 +0200
@@ -37,7 +37,7 @@
 
 start() {
   echo "Starting $APP_NAME"
-  PYTHON_EGG_CACHE="/tmp" start-stop-daemon -d $APP_PATH \
+  start-stop-daemon -d $APP_PATH \
       --start --quiet \
       --pidfile $PID_PATH \
       --user $RUN_AS \
--- a/init.d/kallithea-daemon-gentoo	Sun May 09 08:42:17 2021 +0200
+++ b/init.d/kallithea-daemon-gentoo	Thu May 27 21:27:37 2021 +0200
@@ -33,7 +33,7 @@
 
 start() {
     ebegin "Starting $APP_NAME"
-    start-stop-daemon -d $APP_PATH -e PYTHON_EGG_CACHE="/tmp" \
+    start-stop-daemon -d $APP_PATH \
         --start --quiet \
         --pidfile $PID_PATH \
         --user $RUN_AS \
--- a/init.d/kallithea-daemon-redhat	Sun May 09 08:42:17 2021 +0200
+++ b/init.d/kallithea-daemon-redhat	Thu May 27 21:27:37 2021 +0200
@@ -63,7 +63,7 @@
 
 start_kallithea () {
     ensure_pid_dir
-    PYTHON_EGG_CACHE="/tmp" daemon --pidfile $PID_PATH \
+    daemon --pidfile $PID_PATH \
         --user $RUN_AS "$DAEMON $DAEMON_OPTS"
     RETVAL=$?
     [ $RETVAL -eq 0 ] && touch $LOCK_FILE
--- a/kallithea/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -30,20 +30,26 @@
 import platform
 import sys
 
+import celery
+
 
 if sys.version_info < (3, 6):
     raise Exception('Kallithea requires python 3.6 or later')
 
-VERSION = (0, 6, 3)
+VERSION = (0, 6, 99)
 BACKENDS = {
     'hg': 'Mercurial repository',
     'git': 'Git repository',
 }
 
-CELERY_APP = None  # set to Celery app instance if using Celery
-CELERY_EAGER = False
+CELERY_APP = celery.Celery()  # needed at import time but is lazy and can be configured later
 
-CONFIG = {}
+DEFAULT_USER_ID: int  # set by setup_configuration
+CONFIG = {}  # set to tg.config when TG app is initialized and calls app_cfg
+
+# URL prefix for non repository related links - must start with `/`
+ADMIN_PREFIX = '/_admin'
+URL_SEP = '/'
 
 # Linked module for extensions
 EXTENSIONS = {}
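The ``kallithea/__init__.py`` hunk above replaces the ``CELERY_APP = None`` placeholder with an unconfigured ``celery.Celery()`` instance, relying on Celery apps being lazy. A generic illustration of that pattern using plain Celery API (not Kallithea code):

.. code-block:: python

    import celery

    # The app can be created at import time without any broker settings...
    app = celery.Celery()

    @app.task
    def add(x, y):
        return x + y

    # ...and configured later, once the .ini values are known.
    app.conf.update(
        broker_url='amqp://kallithea:thepassword@localhost:5672/kallitheavhost')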
--- a/kallithea/alembic/env.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/alembic/env.py	Thu May 27 21:27:37 2021 +0200
@@ -21,7 +21,7 @@
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 
-from kallithea.model import db
+from kallithea.model import meta
 
 
 # The alembic.config.Config object, which wraps the current .ini file.
@@ -93,7 +93,7 @@
 
             # Support autogeneration of migration scripts based on "diff" between
             # current database schema and kallithea.model.db schema.
-            target_metadata=db.Base.metadata,
+            target_metadata=meta.Base.metadata,
             include_object=include_in_autogeneration,
             render_as_batch=True, # batch mode is needed for SQLite support
         )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/alembic/versions/7ba0d2cad930_hooks_migrate_internal_hooks_to_.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,57 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""hooks: migrate internal hooks to kallithea namespace
+
+Revision ID: 7ba0d2cad930
+Revises: f62826179f39
+Create Date: 2021-01-11 00:10:13.576586
+
+"""
+
+# The following opaque hexadecimal identifiers ("revisions") are used
+# by Alembic to track this migration script and its relations to others.
+revision = '7ba0d2cad930'
+down_revision = 'f62826179f39'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+from sqlalchemy import MetaData, Table
+
+from kallithea.model import db
+
+
+meta = MetaData()
+
+
+def upgrade():
+    meta.bind = op.get_bind()
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)
+
+    ui.update(values={
+        'ui_key': 'changegroup.kallithea_update',
+        'ui_value': 'python:',  # value in db isn't used
+    }).where(ui.c.ui_key == 'changegroup.update').execute()
+    ui.update(values={
+        'ui_key': 'changegroup.kallithea_repo_size',
+        'ui_value': 'python:',  # value in db isn't used
+    }).where(ui.c.ui_key == 'changegroup.repo_size').execute()
+
+    # 642847355a10 moved these hooks out of db - remove old entries
+    ui.delete().where(ui.c.ui_key == 'changegroup.push_logger').execute()
+    ui.delete().where(ui.c.ui_key == 'outgoing.pull_logger').execute()
+
+
+def downgrade():
+    pass
--- a/kallithea/alembic/versions/a020f7044fd6_rename_hooks.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/alembic/versions/a020f7044fd6_rename_hooks.py	Thu May 27 21:27:37 2021 +0200
@@ -29,7 +29,7 @@
 from alembic import op
 from sqlalchemy import MetaData, Table
 
-from kallithea.model.db import Ui
+from kallithea.model import db
 
 
 meta = MetaData()
@@ -37,7 +37,7 @@
 
 def upgrade():
     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)
 
     ui.update(values={
         'ui_key': 'prechangegroup.push_lock_handling',
@@ -51,7 +51,7 @@
 
 def downgrade():
     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)
 
     ui.update(values={
         'ui_key': 'prechangegroup.pre_push',
--- a/kallithea/alembic/versions/ad357ccd9521_drop_locking.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/alembic/versions/ad357ccd9521_drop_locking.py	Thu May 27 21:27:37 2021 +0200
@@ -30,7 +30,7 @@
 from alembic import op
 from sqlalchemy import MetaData, Table
 
-from kallithea.model.db import Ui
+from kallithea.model import db
 
 
 meta = MetaData()
@@ -45,7 +45,7 @@
         batch_op.drop_column('enable_locking')
 
     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)
     ui.delete().where(ui.c.ui_key == 'prechangegroup.push_lock_handling').execute()
     ui.delete().where(ui.c.ui_key == 'preoutgoing.pull_lock_handling').execute()
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/alembic/versions/f62826179f39_add_unique_constraint_on_.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,73 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""add unique constraint on PullRequestReviewer
+
+Revision ID: f62826179f39
+Revises: a0a1bf09c143
+Create Date: 2020-06-15 12:30:37.420321
+
+"""
+
+# The following opaque hexadecimal identifiers ("revisions") are used
+# by Alembic to track this migration script and its relations to others.
+revision = 'f62826179f39'
+down_revision = 'a0a1bf09c143'
+branch_labels = None
+depends_on = None
+
+import sqlalchemy as sa
+from alembic import op
+
+from kallithea.model import db
+
+
+def upgrade():
+    session = sa.orm.session.Session(bind=op.get_bind())
+
+    # there may be existing duplicates in the database, remove them first
+
+    seen = set()
+    # duplicate_values contains one copy of each duplicated pair
+    duplicate_values = (
+        session
+        .query(db.PullRequestReviewer.pull_request_id, db.PullRequestReviewer.user_id)
+        .group_by(db.PullRequestReviewer.pull_request_id, db.PullRequestReviewer.user_id)
+        .having(sa.func.count(db.PullRequestReviewer.pull_request_reviewers_id) > 1)
+    )
+
+    for pull_request_id, user_id in duplicate_values:
+        # duplicate_occurrences contains all db records of the duplicate_value
+        # currently being processed
+        duplicate_occurrences = (
+            session
+            .query(db.PullRequestReviewer)
+            .filter(db.PullRequestReviewer.pull_request_id == pull_request_id)
+            .filter(db.PullRequestReviewer.user_id == user_id)
+        )
+        for prr in duplicate_occurrences:
+            if (pull_request_id, user_id) in seen:
+                session.delete(prr)
+            else:
+                seen.add((pull_request_id, user_id))
+
+    session.commit()
+
+    # after deleting all duplicates, add the unique constraint
+    with op.batch_alter_table('pull_request_reviewers', schema=None) as batch_op:
+        batch_op.create_unique_constraint(batch_op.f('uq_pull_request_reviewers_pull_request_id'), ['pull_request_id', 'user_id'])
+
+
+def downgrade():
+    with op.batch_alter_table('pull_request_reviewers', schema=None) as batch_op:
+        batch_op.drop_constraint(batch_op.f('uq_pull_request_reviewers_pull_request_id'), type_='unique')
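The deduplication in upgrade() above keeps the first PullRequestReviewer row for each (pull_request_id, user_id) pair and deletes the rest. A minimal standalone sketch of that rule, using plain tuples instead of database rows (hypothetical data, no session involved):

    seen = set()
    kept = []
    for pull_request_id, user_id in [(1, 10), (1, 10), (2, 20), (1, 10)]:
        if (pull_request_id, user_id) in seen:
            continue  # a duplicate - the migration calls session.delete() here
        seen.add((pull_request_id, user_id))
        kept.append((pull_request_id, user_id))
    assert kept == [(1, 10), (2, 20)]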
--- a/kallithea/bin/kallithea_cli_base.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_base.py	Thu May 27 21:27:37 2021 +0200
@@ -23,7 +23,7 @@
 import paste.deploy
 
 import kallithea
-import kallithea.config.middleware
+import kallithea.config.application
 
 
 # kallithea_cli is usually invoked through the 'kallithea-cli' wrapper script
@@ -53,10 +53,10 @@
 def cli():
     """Various commands to manage a Kallithea instance."""
 
-def register_command(config_file=False, config_file_initialize_app=False, hidden=False):
+def register_command(needs_config_file=False, config_file_initialize_app=False, hidden=False):
     """Register a kallithea-cli subcommand.
 
-    If one of the config_file flags are true, a config file must be specified
+    If one of the config file flags is true, a config file must be specified
     with -c and it is read and logging is configured. The configuration is
     available in the kallithea.CONFIG dict.
 
@@ -64,21 +64,23 @@
     (including tg.config), and database access will also be fully initialized.
     """
     cli_command = cli.command(hidden=hidden)
-    if config_file or config_file_initialize_app:
+    if needs_config_file or config_file_initialize_app:
         def annotator(annotated):
             @click.option('--config_file', '-c', help="Path to .ini file with app configuration.",
                 type=click.Path(dir_okay=False, exists=True, readable=True), required=True)
             @functools.wraps(annotated) # reuse meta data from the wrapped function so click can see other options
             def runtime_wrapper(config_file, *args, **kwargs):
                 path_to_ini_file = os.path.realpath(config_file)
-                kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
+                config = paste.deploy.appconfig('config:' + path_to_ini_file)
                 cp = configparser.ConfigParser(strict=False)
                 cp.read_string(read_config(path_to_ini_file, strip_section_prefix=annotated.__name__))
                 logging.config.fileConfig(cp,
                     {'__file__': path_to_ini_file, 'here': os.path.dirname(path_to_ini_file)})
+                if needs_config_file:
+                    annotated(*args, config=config, **kwargs)
                 if config_file_initialize_app:
-                    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
-                return annotated(*args, **kwargs)
+                    kallithea.config.application.make_app(config.global_conf, **config.local_conf)
+                    annotated(*args, **kwargs)
             return cli_command(runtime_wrapper)
         return annotator
     return cli_command
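As a usage sketch of the reworked decorator (the subcommand name and option below are hypothetical; the pattern matches the real subcommands further down in this changeset), a command registered with needs_config_file=True now receives the parsed config object as a keyword argument instead of reading the kallithea.CONFIG global:

    import click

    import kallithea.bin.kallithea_cli_base as cli_base


    @cli_base.register_command(needs_config_file=True)
    @click.option('--show-here', is_flag=True, help='Hypothetical extra option.')
    def config_show(show_here, config):
        """Print selected settings from the -c ini file (illustrative only)."""
        # 'config' is the paste.deploy appconfig read from the ini file
        click.echo(config['sqlalchemy.url'])
        if show_here:
            click.echo(config['here'])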
--- a/kallithea/bin/kallithea_cli_celery.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_celery.py	Thu May 27 21:27:37 2021 +0200
@@ -12,16 +12,18 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import celery.bin.worker
 import click
+from celery.bin.celery import celery as celery_command
 
 import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
+from kallithea.lib import celery_app
+from kallithea.lib.utils2 import asbool
 
 
-@cli_base.register_command(config_file_initialize_app=True)
+@cli_base.register_command(needs_config_file=True)
 @click.argument('celery_args', nargs=-1)
-def celery_run(celery_args):
+def celery_run(celery_args, config):
     """Start Celery worker(s) for asynchronous tasks.
 
     This command starts the Celery daemon which will spawn workers to handle
@@ -32,9 +34,20 @@
     by this CLI command.
     """
 
-    if not kallithea.CELERY_APP:
+    if not asbool(config.get('use_celery')):
         raise Exception('Please set use_celery = true in .ini config '
                         'file before running this command')
 
-    cmd = celery.bin.worker.worker(kallithea.CELERY_APP)
-    return cmd.run_from_argv(None, command='celery-run -c CONFIG_FILE --', argv=list(celery_args))
+    kallithea.CELERY_APP.config_from_object(celery_app.make_celery_config(config))
+
+    kallithea.CELERY_APP.loader.on_worker_process_init = lambda: kallithea.config.application.make_app(config.global_conf, **config.local_conf)
+
+    args = list(celery_args)
+    # args[0] is generally ignored when prog_name is specified, but -h *needs* it to be 'worker', and it will also suggest that users specify 'worker' explicitly
+    if not args or args[0] != 'worker':
+        args.insert(0, 'worker')
+
+    # inline kallithea.CELERY_APP.start in order to allow specifying prog_name
+    assert celery_command.params[0].name == 'app'
+    celery_command.params[0].default = kallithea.CELERY_APP
+    celery_command.main(args=args, prog_name='kallithea-cli celery-run -c CONFIG_FILE --')
--- a/kallithea/bin/kallithea_cli_config.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_config.py	Thu May 27 21:27:37 2021 +0200
@@ -21,7 +21,7 @@
 import mako.exceptions
 
 import kallithea.bin.kallithea_cli_base as cli_base
-import kallithea.lib.locale
+import kallithea.lib.locales
 from kallithea.lib import inifile
 
 
@@ -66,7 +66,7 @@
         'git_hook_interpreter': sys.executable,
         'user_home_path': os.path.expanduser('~'),
         'kallithea_cli_path': cli_base.kallithea_cli_path,
-        'ssh_locale': kallithea.lib.locale.get_current_locale(),
+        'ssh_locale': kallithea.lib.locales.get_current_locale(),
     }
     ini_settings = defaultdict(dict)
 
--- a/kallithea/bin/kallithea_cli_db.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_db.py	Thu May 27 21:27:37 2021 +0200
@@ -15,11 +15,15 @@
 
 import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
+import kallithea.lib.utils
+import kallithea.model.scm
 from kallithea.lib.db_manage import DbManage
-from kallithea.model.meta import Session
+from kallithea.model import meta
 
 
-@cli_base.register_command(config_file=True)
+@cli_base.register_command(needs_config_file=True, config_file_initialize_app=True)
+@click.option('--reuse/--no-reuse', default=False,
+        help='Reuse and clean existing database instead of dropping and creating (default: no reuse)')
 @click.option('--user', help='Username of administrator account.')
 @click.option('--password', help='Password for administrator account.')
 @click.option('--email', help='Email address of administrator account.')
@@ -28,7 +32,7 @@
 @click.option('--force-no', is_flag=True, help='Answer no to every question.')
 @click.option('--public-access/--no-public-access', default=True,
         help='Enable/disable public access on this installation (default: enable)')
-def db_create(user, password, email, repos, force_yes, force_no, public_access):
+def db_create(user, password, email, repos, force_yes, force_no, public_access, reuse, config=None):
     """Initialize the database.
 
     Create all required tables in the database specified in the configuration
@@ -37,44 +41,43 @@
 
     You can pass the answers to all questions as options to this command.
     """
-    dbconf = kallithea.CONFIG['sqlalchemy.url']
+    if config is not None:  # first called with config, before app initialization
+        dbconf = config['sqlalchemy.url']
 
-    # force_ask should be True (yes), False (no), or None (ask)
-    if force_yes:
-        force_ask = True
-    elif force_no:
-        force_ask = False
-    else:
-        force_ask = None
+        # force_ask should be True (yes), False (no), or None (ask)
+        if force_yes:
+            force_ask = True
+        elif force_no:
+            force_ask = False
+        else:
+            force_ask = None
 
-    cli_args = dict(
-            username=user,
-            password=password,
-            email=email,
-            repos_location=repos,
-            force_ask=force_ask,
-            public_access=public_access,
-    )
-    dbmanage = DbManage(dbconf=dbconf, root=kallithea.CONFIG['here'],
-                        tests=False, cli_args=cli_args)
-    dbmanage.create_tables(override=True)
-    repo_root_path = dbmanage.prompt_repo_root_path(None)
-    dbmanage.create_settings(repo_root_path)
-    dbmanage.create_default_user()
-    dbmanage.admin_prompt()
-    dbmanage.create_permissions()
-    dbmanage.populate_default_permissions()
-    Session().commit()
+        cli_args = dict(
+                username=user,
+                password=password,
+                email=email,
+                repos_location=repos,
+                force_ask=force_ask,
+                public_access=public_access,
+        )
+        dbmanage = DbManage(dbconf=dbconf, root=config['here'],
+                            cli_args=cli_args)
+        dbmanage.create_tables(reuse_database=reuse)
+        repo_root_path = dbmanage.prompt_repo_root_path(None)
+        dbmanage.create_settings(repo_root_path)
+        dbmanage.create_default_user()
+        dbmanage.create_admin_user()
+        dbmanage.create_permissions()
+        dbmanage.populate_default_permissions()
+        meta.Session().commit()
 
-    # initial repository scan
-    kallithea.config.middleware.make_app(
-            kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
-    added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
-    if added:
-        click.echo('Initial repository scan: added following repositories:')
-        click.echo('\t%s' % '\n\t'.join(added))
-    else:
-        click.echo('Initial repository scan: no repositories found.')
+    else:  # then called again after app initialization
+        added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
+        if added:
+            click.echo('Initial repository scan: added following repositories:')
+            click.echo('\t%s' % '\n\t'.join(added))
+        else:
+            click.echo('Initial repository scan: no repositories found.')
 
-    click.echo('Database set up successfully.')
-    click.echo("Don't forget to build the front-end using 'kallithea-cli front-end-build'.")
+        click.echo('Database set up successfully.')
+        click.echo("Don't forget to build the front-end using 'kallithea-cli front-end-build'.")
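The 'config is not None' / 'else' split above mirrors how register_command invokes the command when both needs_config_file and config_file_initialize_app are set: once with the parsed config before the app exists, and once more after make_app() has run. A minimal sketch of that double dispatch (hypothetical function and config, no Kallithea involved):

    def command(config=None):
        if config is not None:   # first call, before app initialization
            print('phase 1: create tables using', config['sqlalchemy.url'])
        else:                    # second call, after app initialization
            print('phase 2: scan repositories')

    config = {'sqlalchemy.url': 'sqlite:///kallithea.db'}  # hypothetical minimal config
    command(config=config)
    # the real wrapper runs make_app(...) here
    command()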
--- a/kallithea/bin/kallithea_cli_extensions.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_extensions.py	Thu May 27 21:27:37 2021 +0200
@@ -24,24 +24,23 @@
 import click
 import pkg_resources
 
-import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.utils2 import ask_ok
 
 
-@cli_base.register_command(config_file=True)
-def extensions_create():
+@cli_base.register_command(needs_config_file=True)
+def extensions_create(config):
     """Write template file for extending Kallithea in Python.
 
-    An rcextensions directory with a __init__.py file will be created next to
-    the ini file. Local customizations in that file will survive upgrades.
-    The file contains instructions on how it can be customized.
+    Create a template `extensions.py` file next to the ini file. Local
+    customizations in that file will survive upgrades. The file contains
+    instructions on how it can be customized.
     """
-    here = kallithea.CONFIG['here']
+    here = config['here']
     content = pkg_resources.resource_string(
-        'kallithea', os.path.join('config', 'rcextensions', '__init__.py')
+        'kallithea', os.path.join('templates', 'py', 'extensions.py')
     )
-    ext_file = os.path.join(here, 'rcextensions', '__init__.py')
+    ext_file = os.path.join(here, 'extensions.py')
     if os.path.exists(ext_file):
         msg = ('Extension file %s already exists, do you want '
                'to overwrite it ? [y/n] ') % ext_file
--- a/kallithea/bin/kallithea_cli_iis.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_iis.py	Thu May 27 21:27:37 2021 +0200
@@ -16,7 +16,6 @@
 
 import click
 
-import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
 
 
@@ -57,16 +56,16 @@
     HandleCommandLine(params)
 '''
 
-@cli_base.register_command(config_file=True)
+@cli_base.register_command(needs_config_file=True)
 @click.option('--virtualdir', default='/',
         help='The virtual folder to install into on IIS.')
-def iis_install(virtualdir):
+def iis_install(virtualdir, config):
     """Install into IIS using isapi-wsgi."""
 
-    config_file_abs = kallithea.CONFIG['__file__']
+    config_file_abs = config['__file__']
 
     try:
-        import isapi_wsgi
+        import isapi_wsgi  # pytype: disable=import-error
         assert isapi_wsgi
     except ImportError:
         sys.stderr.write('missing requirement: isapi-wsgi not installed\n')
--- a/kallithea/bin/kallithea_cli_index.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_index.py	Thu May 27 21:27:37 2021 +0200
@@ -28,7 +28,7 @@
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 from kallithea.lib.pidlock import DaemonLock, LockHeld
-from kallithea.lib.utils import load_rcextensions
+from kallithea.lib.utils import load_extensions
 from kallithea.model.repo import RepoModel
 
 
@@ -41,7 +41,7 @@
     """Create or update full text search index"""
 
     index_location = kallithea.CONFIG['index_dir']
-    load_rcextensions(kallithea.CONFIG['here'])
+    load_extensions(kallithea.CONFIG['here'])
 
     if not repo_location:
         repo_location = RepoModel().repos_path
--- a/kallithea/bin/kallithea_cli_repo.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_repo.py	Thu May 27 21:27:37 2021 +0200
@@ -30,15 +30,18 @@
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.utils import REMOVED_REPO_PAT, repo2db_mapper
 from kallithea.lib.utils2 import ask_ok
-from kallithea.model.db import Repository
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.scm import ScmModel
 
 
 @cli_base.register_command(config_file_initialize_app=True)
 @click.option('--remove-missing', is_flag=True,
         help='Remove missing repositories from the Kallithea database.')
-def repo_scan(remove_missing):
+@click.option('--install-git-hooks', is_flag=True,
+        help='(Re)install Kallithea Git hooks without overwriting other hooks.')
+@click.option('--overwrite-git-hooks', is_flag=True,
+        help='(Re)install Kallithea Git hooks, overwriting other hooks.')
+def repo_scan(remove_missing, install_git_hooks, overwrite_git_hooks):
     """Scan filesystem for repositories.
 
     Search the configured repository root for new repositories and add them
@@ -49,7 +52,9 @@
     """
     click.echo('Now scanning root location for new repos ...')
     added, removed = repo2db_mapper(ScmModel().repo_scan(),
-                                    remove_obsolete=remove_missing)
+                                    remove_obsolete=remove_missing,
+                                    install_git_hooks=install_git_hooks,
+                                    overwrite_git_hooks=overwrite_git_hooks)
     click.echo('Scan completed.')
     if added:
         click.echo('Added: %s' % ', '.join(added))
@@ -73,11 +78,11 @@
     updated.
     """
     if not repositories:
-        repo_list = Repository.query().all()
+        repo_list = db.Repository.query().all()
     else:
         repo_names = [n.strip() for n in repositories]
-        repo_list = list(Repository.query()
-                        .filter(Repository.repo_name.in_(repo_names)))
+        repo_list = list(db.Repository.query()
+                        .filter(db.Repository.repo_name.in_(repo_names)))
 
     for repo in repo_list:
         # update latest revision metadata in database
@@ -86,7 +91,7 @@
         # first access
         repo.set_invalidate()
 
-    Session().commit()
+    meta.Session().commit()
 
     click.echo('Updated database with information about latest change in the following %s repositories:' % (len(repo_list)))
     click.echo('\n'.join(repo.repo_name for repo in repo_list))
--- a/kallithea/bin/kallithea_cli_ssh.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/bin/kallithea_cli_ssh.py	Thu May 27 21:27:37 2021 +0200
@@ -21,9 +21,9 @@
 
 import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
-from kallithea.lib.utils2 import str2bool
-from kallithea.lib.vcs.backends.git.ssh import GitSshHandler
-from kallithea.lib.vcs.backends.hg.ssh import MercurialSshHandler
+from kallithea.lib.utils2 import asbool
+from kallithea.lib.vcs.ssh.git import GitSshHandler
+from kallithea.lib.vcs.ssh.hg import MercurialSshHandler
 from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException
 
 
@@ -40,8 +40,7 @@
     protocol access. The access will be granted as the specified user ID, and
     logged as using the specified key ID.
     """
-    ssh_enabled = kallithea.CONFIG.get('ssh_enabled', False)
-    if not str2bool(ssh_enabled):
+    if not asbool(kallithea.CONFIG.get('ssh_enabled', False)):
         sys.stderr.write("SSH access is disabled.\n")
         return sys.exit(1)
 
@@ -70,7 +69,7 @@
         vcs_handler = VcsHandler.make(ssh_command_parts)
         if vcs_handler is not None:
             vcs_handler.serve(user_id, key_id, client_ip)
-            assert False # serve is written so it never will terminate
+            sys.exit(0)
 
     sys.stderr.write("This account can only be used for repository access. SSH command %r is not supported.\n" % ssh_original_command)
     sys.exit(1)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/bin/vcs_hooks.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.bin.vcs_hooks
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Entry points for Kallithea hooking into Mercurial and Git.
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Aug 6, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+import logging
+import os
+import sys
+
+import mercurial.hg
+import mercurial.scmutil
+import paste.deploy
+
+import kallithea
+import kallithea.config.application
+from kallithea.lib import hooks, webutils
+from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str
+from kallithea.lib.vcs.backends.base import EmptyChangeset
+from kallithea.lib.vcs.utils.helpers import get_scm_size
+from kallithea.model import db
+
+
+log = logging.getLogger(__name__)
+
+
+def repo_size(ui, repo, hooktype=None, **kwargs):
+    """Show size of Mercurial repository.
+
+    Called as Mercurial hook changegroup.kallithea_repo_size after push.
+    """
+    size_hg, size_root = get_scm_size('.hg', safe_str(repo.root))
+
+    last_cs = repo[len(repo) - 1]
+
+    msg = ('Repository size .hg: %s Checkout: %s Total: %s\n'
+           'Last revision is now r%s:%s\n') % (
+        webutils.format_byte_size(size_hg),
+        webutils.format_byte_size(size_root),
+        webutils.format_byte_size(size_hg + size_root),
+        last_cs.rev(),
+        ascii_str(last_cs.hex())[:12],
+    )
+    ui.status(safe_bytes(msg))
+
+
+def update(ui, repo, hooktype=None, **kwargs):
+    """Update repo after push. The equivalent of 'hg update', but using the same
+    Mercurial as everything else.
+
+    Called as Mercurial hook changegroup.kallithea_update after push.
+    """
+    try:
+        ui.pushbuffer(error=True, subproc=True)
+        rev = brev = None
+        mercurial.hg.updatetotally(ui, repo, rev, brev)
+    finally:
+        s = ui.popbuffer()  # usually just "x files updated, x files merged, x files removed, x files unresolved"
+        log.info('%s update hook output: %s', safe_str(repo.root), safe_str(s).rstrip())
+
+
+def pull_action(ui, repo, **kwargs):
+    """Logs user pull action
+
+    Called as Mercurial hook outgoing.kallithea_pull_action.
+    """
+    hooks.log_pull_action()
+
+
+def push_action(ui, repo, node, node_last, **kwargs):
+    """
+    Register that changes have been added to the repo - log the action *and* invalidate caches.
+    Note: This hook does not only do logging, but also has the side effect of
+    invalidating caches! The function should perhaps be renamed.
+
+    Called as Mercurial hook changegroup.kallithea_push_action .
+
+    The pushed changesets are given by the revset 'node:node_last'.
+    """
+    revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])]
+    hooks.process_pushed_raw_ids(revs)
+
+
+def _git_hook_environment(repo_path):
+    """
+    Create a light-weight environment for stand-alone scripts and return the
+    db repository.
+
+    Git hooks are executed as subprocess of Git while Kallithea is waiting, and
+    they thus need enough info to be able to create an app environment and
+    connect to the database.
+    """
+    extras = get_hook_environment()
+
+    path_to_ini_file = extras['config']
+    config = paste.deploy.appconfig('config:' + path_to_ini_file)
+    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
+    kallithea.config.application.make_app(config.global_conf, **config.local_conf)
+
+    # fix if it's not a bare repo
+    if repo_path.endswith(os.sep + '.git'):
+        repo_path = repo_path[:-5]
+
+    repo = db.Repository.get_by_full_path(repo_path)
+    if not repo:
+        raise OSError('Repository %s not found in database' % repo_path)
+
+    return repo
+
+
+def post_receive(repo_path, git_stdin_lines):
+    """Called from Git post-receive hook.
+    The returned value is used as hook exit code and must be 0.
+    """
+    try:
+        repo = _git_hook_environment(repo_path)
+    except HookEnvironmentError as e:
+        sys.stderr.write("Skipping Kallithea Git post-receive hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e))
+        return 0
+
+    # the post push hook should never use the cached instance
+    scm_repo = repo.scm_instance_no_cache()
+
+    rev_data = []
+    for l in git_stdin_lines:
+        old_rev, new_rev, ref = l.strip().split(' ')
+        _ref_data = ref.split('/')
+        if _ref_data[1] in ['tags', 'heads']:
+            rev_data.append({'old_rev': old_rev,
+                             'new_rev': new_rev,
+                             'ref': ref,
+                             'type': _ref_data[1],
+                             'name': '/'.join(_ref_data[2:])})
+
+    git_revs = []
+    for push_ref in rev_data:
+        _type = push_ref['type']
+        if _type == 'heads':
+            if push_ref['old_rev'] == EmptyChangeset().raw_id:
+                # update the symbolic ref if we push a new repo
+                if scm_repo.is_empty():
+                    scm_repo._repo.refs.set_symbolic_ref(
+                        b'HEAD',
+                        b'refs/heads/%s' % safe_bytes(push_ref['name']))
+
+                # build exclude list without the ref
+                cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
+                stdout = scm_repo.run_git_command(cmd)
+                ref = push_ref['ref']
+                heads = [head for head in stdout.splitlines() if head != ref]
+                # now list the git revs while excluding from the list
+                cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H']
+                cmd.append('--not')
+                cmd.extend(heads) # empty list is ok
+                stdout = scm_repo.run_git_command(cmd)
+                git_revs += stdout.splitlines()
+
+            elif push_ref['new_rev'] == EmptyChangeset().raw_id:
+                # delete branch case
+                git_revs += ['delete_branch=>%s' % push_ref['name']]
+            else:
+                cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
+                       '--reverse', '--pretty=format:%H']
+                stdout = scm_repo.run_git_command(cmd)
+                git_revs += stdout.splitlines()
+
+        elif _type == 'tags':
+            git_revs += ['tag=>%s' % push_ref['name']]
+
+    hooks.process_pushed_raw_ids(git_revs)
+
+    return 0
+
+
+# Almost exactly like Mercurial contrib/hg-ssh:
+def rejectpush(ui, **kwargs):
+    """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos.
+    Return value 1 will make the hook fail and reject the push.
+    """
+    ex = get_hook_environment()
+    ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository))
+    return 1
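For reference, post_receive() above parses the standard Git post-receive stdin format of one 'old-sha new-sha refname' line per updated ref. A small worked example with made-up SHA values:

    line = '0' * 40 + ' ' + 'f' * 40 + ' refs/heads/feature/x'
    old_rev, new_rev, ref = line.strip().split(' ')
    _ref_data = ref.split('/')
    assert _ref_data[1] == 'heads'                  # a branch update
    assert '/'.join(_ref_data[2:]) == 'feature/x'   # the branch name
    # an all-zero old rev marks ref creation (matched against EmptyChangeset above)
    assert old_rev == '0' * 40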
--- a/kallithea/config/app_cfg.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/config/app_cfg.py	Thu May 27 21:27:37 2021 +0200
@@ -28,82 +28,57 @@
 from alembic.migration import MigrationContext
 from alembic.script.base import ScriptDirectory
 from sqlalchemy import create_engine
-from tg.configuration import AppConfig
-from tg.support.converters import asbool
+from tg import FullStackApplicationConfigurator
 
-import kallithea.lib.locale
+import kallithea.lib.locales
 import kallithea.model.base
 import kallithea.model.meta
-from kallithea.lib import celerypylons
-from kallithea.lib.middleware.https_fixup import HttpsFixup
-from kallithea.lib.middleware.permanent_repo_url import PermanentRepoUrl
-from kallithea.lib.middleware.simplegit import SimpleGit
-from kallithea.lib.middleware.simplehg import SimpleHg
-from kallithea.lib.middleware.wrapper import RequestWrapper
-from kallithea.lib.utils import check_git_version, load_rcextensions, set_app_settings, set_indexer_config, set_vcs_config
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib import celery_app
+from kallithea.lib.utils import load_extensions, set_app_settings, set_indexer_config, set_vcs_config
+from kallithea.lib.utils2 import asbool, check_git_version
 from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
 
 
-class KallitheaAppConfig(AppConfig):
-    # Note: AppConfig has a misleading name, as it's not the application
-    # configuration, but the application configurator. The AppConfig values are
-    # used as a template to create the actual configuration, which might
-    # overwrite or extend the one provided by the configurator template.
+base_config = FullStackApplicationConfigurator()
 
-    # To make it clear, AppConfig creates the config and sets into it the same
-    # values that AppConfig itself has. Then the values from the config file and
-    # gearbox options are loaded and merged into the configuration. Then an
-    # after_init_config(conf) method of AppConfig is called for any change that
-    # might depend on options provided by configuration files.
+base_config.update_blueprint({
+    'package': kallithea,
 
-    def __init__(self):
-        super(KallitheaAppConfig, self).__init__()
-
-        self['package'] = kallithea
+    # Rendering Engines Configuration
+    'renderers': [
+        'json',
+        'mako',
+    ],
+    'default_renderer': 'mako',
+    'use_dotted_templatenames': False,
 
-        self['prefer_toscawidgets2'] = False
-        self['use_toscawidgets'] = False
-
-        self['renderers'] = []
-
-        # Enable json in expose
-        self['renderers'].append('json')
+    # Configure Sessions, store data as JSON to avoid pickle security issues
+    'session.enabled': True,
+    'session.data_serializer': 'json',
 
-        # Configure template rendering
-        self['renderers'].append('mako')
-        self['default_renderer'] = 'mako'
-        self['use_dotted_templatenames'] = False
+    # Configure the base SQLAlchemy setup
+    'use_sqlalchemy': True,
+    'model': kallithea.model.base,
+    'DBSession': kallithea.model.meta.Session,
 
-        # Configure Sessions, store data as JSON to avoid pickle security issues
-        self['session.enabled'] = True
-        self['session.data_serializer'] = 'json'
-
-        # Configure the base SQLALchemy Setup
-        self['use_sqlalchemy'] = True
-        self['model'] = kallithea.model.base
-        self['DBSession'] = kallithea.model.meta.Session
+    # Configure App without an authentication backend.
+    'auth_backend': None,
 
-        # Configure App without an authentication backend.
-        self['auth_backend'] = None
-
-        # Use custom error page for these errors. By default, Turbogears2 does not add
-        # 400 in this list.
-        # Explicitly listing all is considered more robust than appending to defaults,
-        # in light of possible future framework changes.
-        self['errorpage.status_codes'] = [400, 401, 403, 404]
+    # Use custom error page for these errors. By default, Turbogears2 does not add
+    # 400 in this list.
+    # Explicitly listing all is considered more robust than appending to defaults,
+    # in light of possible future framework changes.
+    'errorpage.status_codes': [400, 401, 403, 404],
 
-        # Disable transaction manager -- currently Kallithea takes care of transactions itself
-        self['tm.enabled'] = False
+    # Disable transaction manager -- currently Kallithea takes care of transactions itself
+    'tm.enabled': False,
 
-        # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
-        self['i18n.lang'] = 'en'
-
-
-base_config = KallitheaAppConfig()
+    # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
+    'i18n.lang': 'en',
+})
 
 # DebugBar, a debug toolbar for TurboGears2.
 # (https://github.com/TurboGears/tgext.debugbar)
@@ -111,20 +86,20 @@
 # 'debug = true' (not in production!)
 # See the Kallithea documentation for more information.
 try:
+    import kajiki  # only to check its existence
     from tgext.debugbar import enable_debugbar
-    import kajiki # only to check its existence
     assert kajiki
 except ImportError:
     pass
 else:
-    base_config['renderers'].append('kajiki')
+    base_config.get_blueprint_value('renderers').append('kajiki')
     enable_debugbar(base_config)
 
 
 def setup_configuration(app):
     config = app.config
 
-    if not kallithea.lib.locale.current_locale_is_valid():
+    if not kallithea.lib.locales.current_locale_is_valid():
         log.error("Terminating ...")
         sys.exit(1)
 
@@ -134,7 +109,7 @@
         mercurial.encoding.encoding = hgencoding
 
     if config.get('ignore_alembic_revision', False):
-        log.warn('database alembic revision checking is disabled')
+        log.warning('database alembic revision checking is disabled')
     else:
         dbconf = config['sqlalchemy.url']
         alembic_cfg = alembic.config.Config()
@@ -160,11 +135,11 @@
     # store some globals into kallithea
     kallithea.DEFAULT_USER_ID = db.User.get_default_user().user_id
 
-    if str2bool(config.get('use_celery')):
-        kallithea.CELERY_APP = celerypylons.make_app()
+    if asbool(config.get('use_celery')) and not kallithea.CELERY_APP.finalized:
+        kallithea.CELERY_APP.config_from_object(celery_app.make_celery_config(config))
     kallithea.CONFIG = config
 
-    load_rcextensions(root_path=config['here'])
+    load_extensions(root_path=config['here'])
 
     set_app_settings(config)
 
@@ -188,27 +163,3 @@
 
 
 tg.hooks.register('configure_new_app', setup_configuration)
-
-
-def setup_application(app):
-    config = app.config
-
-    # we want our low level middleware to get to the request ASAP. We don't
-    # need any stack middleware in them - especially no StatusCodeRedirect buffering
-    app = SimpleHg(app, config)
-    app = SimpleGit(app, config)
-
-    # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy
-    if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']):
-        app = HttpsFixup(app, config)
-
-    app = PermanentRepoUrl(app, config)
-
-    # Optional and undocumented wrapper - gives more verbose request/response logging, but has a slight overhead
-    if str2bool(config.get('use_wsgi_wrapper')):
-        app = RequestWrapper(app, config)
-
-    return app
-
-
-tg.hooks.register('before_config', setup_application)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/application.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""WSGI middleware initialization for the Kallithea application."""
+
+from kallithea.config.app_cfg import base_config
+from kallithea.config.middleware.https_fixup import HttpsFixup
+from kallithea.config.middleware.permanent_repo_url import PermanentRepoUrl
+from kallithea.config.middleware.simplegit import SimpleGit
+from kallithea.config.middleware.simplehg import SimpleHg
+from kallithea.config.middleware.wrapper import RequestWrapper
+from kallithea.lib.utils2 import asbool
+
+
+__all__ = ['make_app']
+
+
+def wrap_app(app):
+    """Wrap the TG WSGI application in Kallithea middleware"""
+    config = app.config
+
+    # we want our low level middleware to get to the request ASAP. We don't
+    # need any stack middleware in them - especially no StatusCodeRedirect buffering
+    app = SimpleHg(app, config)
+    app = SimpleGit(app, config)
+
+    # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy
+    if any(asbool(config.get(x)) for x in ['url_scheme_variable', 'force_https', 'use_htsts']):
+        app = HttpsFixup(app, config)
+
+    app = PermanentRepoUrl(app, config)
+
+    # Optional and undocumented wrapper - gives more verbose request/response logging, but has a slight overhead
+    if asbool(config.get('use_wsgi_wrapper')):
+        app = RequestWrapper(app, config)
+
+    return app
+
+
+def make_app(global_conf, **app_conf):
+    """
+    Set up Kallithea with the settings found in the PasteDeploy configuration
+    file used.
+
+    :param global_conf: The global settings for Kallithea (those
+        defined under the ``[DEFAULT]`` section).
+    :return: The Kallithea application with all the relevant middleware
+        loaded.
+
+    This is the PasteDeploy factory for the Kallithea application.
+
+    ``app_conf`` contains all the application-specific settings (those defined
+    under ``[app:main]``).
+    """
+    assert app_conf.get('sqlalchemy.url')  # must be called with a Kallithea .ini file, which for example must have this config option
+    assert global_conf.get('here') and global_conf.get('__file__')  # app config should be initialized the paste way ...
+
+    return base_config.make_wsgi_app(global_conf, app_conf, wrap_app=wrap_app)
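The factory above is used the same way throughout this changeset (kallithea_cli_base.py and vcs_hooks.py): load the ini file with paste.deploy and pass global_conf/local_conf to make_app. A condensed sketch (the ini path is a placeholder):

    import paste.deploy

    import kallithea.config.application

    config = paste.deploy.appconfig('config:/path/to/my.ini')  # placeholder path
    app = kallithea.config.application.make_app(config.global_conf, **config.local_conf)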
--- a/kallithea/config/conf.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.config.conf
-~~~~~~~~~~~~~~~~~~~~~
-
-Various config settings for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Mar 7, 2012
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-from kallithea.lib import pygmentsutils
-
-
-# language map is also used by whoosh indexer, which for those specified
-# extensions will index it's content
-LANGUAGES_EXTENSIONS_MAP = pygmentsutils.get_extension_descriptions()
-
-# Whoosh index targets
-
-# Extensions we want to index content of using whoosh
-INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP)
-
-# Filenames we want to index content of using whoosh
-INDEX_FILENAMES = pygmentsutils.get_index_filenames()
-
-# list of readme files to search in file tree and display in summary
-# attached weights defines the search  order lower is first
-ALL_READMES = [
-    ('readme', 0), ('README', 0), ('Readme', 0),
-    ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
-    ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
-    ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
-    ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
-]
-
-# extension together with weights to search lower is first
-RST_EXTS = [
-    ('', 0), ('.rst', 1), ('.rest', 1),
-    ('.RST', 2), ('.REST', 2),
-    ('.txt', 3), ('.TXT', 3)
-]
-
-MARKDOWN_EXTS = [
-    ('.md', 1), ('.MD', 1),
-    ('.mkdn', 2), ('.MKDN', 2),
-    ('.mdown', 3), ('.MDOWN', 3),
-    ('.markdown', 4), ('.MARKDOWN', 4)
-]
-
-PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)]
-
-ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS
--- a/kallithea/config/environment.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""WSGI environment setup for Kallithea."""
-
-from kallithea.config.app_cfg import base_config
-
-
-__all__ = ['load_environment']
-
-# Use base_config to setup the environment loader function
-load_environment = base_config.make_load_environment()
--- a/kallithea/config/middleware.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""WSGI middleware initialization for the Kallithea application."""
-
-from kallithea.config.app_cfg import base_config
-from kallithea.config.environment import load_environment
-
-
-__all__ = ['make_app']
-
-# Use base_config to setup the necessary PasteDeploy application factory.
-# make_base_app will wrap the TurboGears2 app with all the middleware it needs.
-make_base_app = base_config.setup_tg_wsgi_app(load_environment)
-
-
-def make_app(global_conf, full_stack=True, **app_conf):
-    """
-    Set up Kallithea with the settings found in the PasteDeploy configuration
-    file used.
-
-    :param global_conf: The global settings for Kallithea (those
-        defined under the ``[DEFAULT]`` section).
-    :type global_conf: dict
-    :param full_stack: Should the whole TurboGears2 stack be set up?
-    :type full_stack: str or bool
-    :return: The Kallithea application with all the relevant middleware
-        loaded.
-
-    This is the PasteDeploy factory for the Kallithea application.
-
-    ``app_conf`` contains all the application-specific settings (those defined
-    under ``[app:main]``.
-    """
-    assert app_conf.get('sqlalchemy.url')  # must be called with a Kallithea .ini file, which for example must have this config option
-    assert global_conf.get('here') and global_conf.get('__file__')  # app config should be initialized the paste way ...
-    return make_base_app(global_conf, full_stack=full_stack, **app_conf)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/https_fixup.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.https_fixup
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+middleware to handle https correctly
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: May 23, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+
+import kallithea
+from kallithea.lib.utils2 import asbool
+
+
+class HttpsFixup(object):
+
+    def __init__(self, app, config):
+        self.application = app
+        self.config = config
+
+    def __call__(self, environ, start_response):
+        self.__fixup(environ)
+        debug = asbool(self.config.get('debug'))
+        is_ssl = environ['wsgi.url_scheme'] == 'https'
+
+        def custom_start_response(status, headers, exc_info=None):
+            if is_ssl and asbool(self.config.get('use_htsts')) and not debug:
+                headers.append(('Strict-Transport-Security',
+                                'max-age=8640000; includeSubDomains'))
+            return start_response(status, headers, exc_info)
+
+        return self.application(environ, custom_start_response)
+
+    def __fixup(self, environ):
+        """
+        Fix up the environ as needed. In order to use this middleware, the
+        proxy (e.g. nginx or Apache) must set the header or WSGI variable
+        configured as 'url_scheme_variable'.
+        """
+        proto = None
+
+        # if we have force, just override
+        if asbool(self.config.get('force_https')):
+            proto = 'https'
+        else:
+            # get protocol from configured WSGI environment variable
+            url_scheme_variable = kallithea.CONFIG.get('url_scheme_variable')
+            if url_scheme_variable:
+                proto = environ.get(url_scheme_variable)
+
+        if proto:
+            environ['wsgi._org_proto'] = environ.get('wsgi.url_scheme')
+            environ['wsgi.url_scheme'] = proto
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/permanent_repo_url.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.permanent_repo_url
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+middleware to handle permanent repo URLs, replacing PATH_INFO '/_123/yada' with
+'/name/of/repo/yada' after looking 123 up in the database.
+"""
+
+
+from kallithea.lib.utils import fix_repo_id_name
+from kallithea.lib.utils2 import safe_bytes, safe_str
+
+
+class PermanentRepoUrl(object):
+
+    def __init__(self, app, config):
+        self.application = app
+        self.config = config
+
+    def __call__(self, environ, start_response):
+        # Extract path_info as get_path_info does, but do it explicitly because
+        # we also have to do the reverse operation when patching it back in
+        path_info = safe_str(environ['PATH_INFO'].encode('latin1'))
+        if path_info.startswith('/'): # it must
+            path_info = '/' + fix_repo_id_name(path_info[1:])
+            environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1')
+
+        return self.application(environ, start_response)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/pygrack.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,227 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.pygrack
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Python implementation of git-http-backend's Smart HTTP protocol
+
+Based on original code from git_http_backend.py project.
+
+Copyright (c) 2010 Daniel Dotsenko <dotsa@hotmail.com>
+Copyright (c) 2012 Marcin Kuzminski <marcin@python-works.com>
+
+This file was forked by the Kallithea project in July 2014.
+"""
+
+import logging
+import os
+import socket
+import traceback
+
+from dulwich.server import update_server_info
+from dulwich.web import GunzipFilter, LimitedInputFilter
+from webob import Request, Response, exc
+
+import kallithea
+from kallithea.lib.utils2 import ascii_bytes
+from kallithea.lib.vcs import get_repo, subprocessio
+
+
+log = logging.getLogger(__name__)
+
+
+class FileWrapper(object):
+
+    def __init__(self, fd, content_length):
+        self.fd = fd
+        self.content_length = content_length
+        self.remain = content_length
+
+    def read(self, size):
+        if size <= self.remain:
+            try:
+                data = self.fd.read(size)
+            except socket.error:
+                raise IOError(self)
+            self.remain -= size
+        elif self.remain:
+            data = self.fd.read(self.remain)
+            self.remain = 0
+        else:
+            data = None
+        return data
+
+    def __repr__(self):
+        return '<FileWrapper %s len: %s, read: %s>' % (
+            self.fd, self.content_length, self.content_length - self.remain
+        )
+
+
+class GitRepository(object):
+    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
+    commands = ['git-upload-pack', 'git-receive-pack']
+
+    def __init__(self, repo_name, content_path):
+        files = set([f.lower() for f in os.listdir(content_path)])
+        if not (self.git_folder_signature.intersection(files)
+                == self.git_folder_signature):
+            raise OSError('%s missing git signature' % content_path)
+        self.content_path = content_path
+        self.valid_accepts = ['application/x-%s-result' %
+                              c for c in self.commands]
+        self.repo_name = repo_name
+
+    def _get_fixedpath(self, path):
+        """
+        Small fix for repo_path
+
+        :param path:
+        """
+        assert path.startswith('/' + self.repo_name + '/')
+        return path[len(self.repo_name) + 2:].strip('/')
+
+    def inforefs(self, req, environ):
+        """
+        WSGI Response producer for HTTP GET Git Smart
+        HTTP /info/refs request.
+        """
+
+        git_command = req.GET.get('service')
+        if git_command not in self.commands:
+            log.debug('command %s not allowed', git_command)
+            return exc.HTTPMethodNotAllowed()
+
+        # From Documentation/technical/http-protocol.txt shipped with Git:
+        #
+        # Clients MUST verify the first pkt-line is `# service=$servicename`.
+        # Servers MUST set $servicename to be the request parameter value.
+        # Servers SHOULD include an LF at the end of this line.
+        # Clients MUST ignore an LF at the end of the line.
+        #
+        #  smart_reply     =  PKT-LINE("# service=$servicename" LF)
+        #                     ref_list
+        #                     "0000"
+        server_advert = '# service=%s\n' % git_command
+        packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
+        _git_path = kallithea.CONFIG.get('git_path', 'git')
+        cmd = [_git_path, git_command[4:],
+               '--stateless-rpc', '--advertise-refs', self.content_path]
+        log.debug('handling cmd %s', cmd)
+        try:
+            out = subprocessio.SubprocessIOChunker(cmd,
+                starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
+            )
+        except EnvironmentError as e:
+            log.error(traceback.format_exc())
+            raise exc.HTTPExpectationFailed()
+        resp = Response()
+        resp.content_type = 'application/x-%s-advertisement' % git_command
+        resp.charset = None
+        resp.app_iter = out
+        return resp
+
+    def backend(self, req, environ):
+        """
+        WSGI Response producer for HTTP POST Git Smart HTTP requests.
+        Reads commands and data from HTTP POST's body.
+        Returns an iterator object with the contents of the git command's
+        response on stdout.
+        """
+        _git_path = kallithea.CONFIG.get('git_path', 'git')
+        git_command = self._get_fixedpath(req.path_info)
+        if git_command not in self.commands:
+            log.debug('command %s not allowed', git_command)
+            return exc.HTTPMethodNotAllowed()
+
+        if 'CONTENT_LENGTH' in environ:
+            inputstream = FileWrapper(environ['wsgi.input'],
+                                      req.content_length)
+        else:
+            inputstream = environ['wsgi.input']
+
+        gitenv = dict(os.environ)
+        # forget all configs
+        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
+        cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path]
+        log.debug('handling cmd %s', cmd)
+        try:
+            out = subprocessio.SubprocessIOChunker(
+                cmd,
+                inputstream=inputstream,
+                env=gitenv,
+                cwd=self.content_path,
+            )
+        except EnvironmentError as e:
+            log.error(traceback.format_exc())
+            raise exc.HTTPExpectationFailed()
+
+        if git_command in ['git-receive-pack']:
+            # updating refs manually after each push.
+            # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
+            repo = get_repo(self.content_path)
+            if repo:
+                update_server_info(repo._repo)
+
+        resp = Response()
+        resp.content_type = 'application/x-%s-result' % git_command
+        resp.charset = None
+        resp.app_iter = out
+        return resp
+
+    def __call__(self, environ, start_response):
+        req = Request(environ)
+        _path = self._get_fixedpath(req.path_info)
+        if _path.startswith('info/refs'):
+            app = self.inforefs
+        elif req.accept.acceptable_offers(self.valid_accepts):
+            app = self.backend
+        try:
+            resp = app(req, environ)
+        except exc.HTTPException as e:
+            resp = e
+            log.error(traceback.format_exc())
+        except Exception as e:
+            log.error(traceback.format_exc())
+            resp = exc.HTTPInternalServerError()
+        return resp(environ, start_response)
+
+
+class GitDirectory(object):
+
+    def __init__(self, repo_root, repo_name):
+        repo_location = os.path.join(repo_root, repo_name)
+        if not os.path.isdir(repo_location):
+            raise OSError(repo_location)
+
+        self.content_path = repo_location
+        self.repo_name = repo_name
+        self.repo_location = repo_location
+
+    def __call__(self, environ, start_response):
+        content_path = self.content_path
+        try:
+            app = GitRepository(self.repo_name, content_path)
+        except (AssertionError, OSError):
+            content_path = os.path.join(content_path, '.git')
+            if os.path.isdir(content_path):
+                app = GitRepository(self.repo_name, content_path)
+            else:
+                return exc.HTTPNotFound()(environ, start_response)
+        return app(environ, start_response)
+
+
+def make_wsgi_app(repo_name, repo_root):
+    app = GitDirectory(repo_root, repo_name)
+    return GunzipFilter(LimitedInputFilter(app))
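A worked example of the pkt-line length prefix computed in inforefs() above: the prefix is the total line length (payload plus the 4 prefix characters) encoded as 4 lower-case hex digits.

    git_command = 'git-upload-pack'
    server_advert = '# service=%s\n' % git_command           # 26 characters
    packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
    assert packet_len == '001e'                               # 26 + 4 = 30 = 0x1e
    # the response starts with '001e# service=git-upload-pack\n0000'
    # followed by git's ref advertisement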
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/simplegit.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.simplegit
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+SimpleGit middleware for handling Git protocol requests (push/clone etc.).
+It is implemented with a basic auth function.
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Apr 28, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+
+"""
+
+
+import logging
+import re
+
+from kallithea.config.middleware.pygrack import make_wsgi_app
+from kallithea.controllers import base
+from kallithea.lib import hooks
+
+
+log = logging.getLogger(__name__)
+
+
+GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)$')
+
+
+cmd_mapping = {
+    'git-receive-pack': 'push',
+    'git-upload-pack': 'pull',
+}
+
+
+class SimpleGit(base.BaseVCSController):
+
+    scm_alias = 'git'
+
+    @classmethod
+    def parse_request(cls, environ):
+        path_info = base.get_path_info(environ)
+        m = GIT_PROTO_PAT.match(path_info)
+        if m is None:
+            return None
+
+        class parsed_request(object):
+            # See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol
+            repo_name = m.group(1).rstrip('/')
+            cmd = m.group(2)
+
+            query_string = environ['QUERY_STRING']
+            if cmd == 'info/refs' and query_string.startswith('service='):
+                service = query_string.split('=', 1)[1]
+                action = cmd_mapping.get(service)
+            else:
+                service = None
+                action = cmd_mapping.get(cmd)
+
+        return parsed_request
+
+    def _make_app(self, parsed_request):
+        """
+        Return a pygrack wsgi application.
+        """
+        pygrack_app = make_wsgi_app(parsed_request.repo_name, self.basepath)
+
+        def wrapper_app(environ, start_response):
+            if (parsed_request.cmd == 'info/refs' and
+                parsed_request.service == 'git-upload-pack'
+            ):
+                # Run hooks like Mercurial outgoing.kallithea_pull_action does
+                hooks.log_pull_action()
+            # Note: push hooks are handled by post-receive hook
+
+            return pygrack_app(environ, start_response)
+
+        return wrapper_app
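
SimpleGit.parse_request() classifies a request purely from the URL and query string: the two POST endpoints map directly through cmd_mapping, while the initial GET of info/refs is classified by its service= parameter. A self-contained sketch mirroring that logic, without importing Kallithea:

    import re

    GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)$')
    cmd_mapping = {'git-receive-pack': 'push', 'git-upload-pack': 'pull'}

    def classify(path_info, query_string=''):
        """Return (repo_name, action) for a Git smart HTTP request, else None."""
        m = GIT_PROTO_PAT.match(path_info)
        if m is None:
            return None
        repo_name, cmd = m.group(1).rstrip('/'), m.group(2)
        if cmd == 'info/refs' and query_string.startswith('service='):
            # ref advertisement: the client announces the service it will use next
            return repo_name, cmd_mapping.get(query_string.split('=', 1)[1])
        return repo_name, cmd_mapping.get(cmd)

    assert classify('/group/repo/info/refs', 'service=git-upload-pack') == ('group/repo', 'pull')
    assert classify('/group/repo/git-receive-pack') == ('group/repo', 'push')
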
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/simplehg.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.simplehg
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.).
+It is implemented with a basic auth function.
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Apr 28, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+
+"""
+
+
+import logging
+import os
+import urllib.parse
+
+import mercurial.hgweb
+
+from kallithea.controllers import base
+from kallithea.lib.utils import make_ui
+from kallithea.lib.utils2 import safe_bytes
+
+
+log = logging.getLogger(__name__)
+
+
+def get_header_hgarg(environ):
+    """Decode the special Mercurial encoding of big requests over multiple headers.
+    >>> get_header_hgarg({})
+    ''
+    >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'})
+    'ab+c %20'
+    """
+    chunks = []
+    i = 1
+    while True:
+        v = environ.get('HTTP_X_HGARG_%d' % i)
+        if v is None:
+            break
+        chunks.append(v)
+        i += 1
+    return ''.join(chunks)
+
+
+cmd_mapping = {
+    # 'batch' is not in this list - it is handled explicitly
+    'between': 'pull',
+    'branches': 'pull',
+    'branchmap': 'pull',
+    'capabilities': 'pull',
+    'changegroup': 'pull',
+    'changegroupsubset': 'pull',
+    'changesetdata': 'pull',
+    'clonebundles': 'pull',
+    'debugwireargs': 'pull',
+    'filedata': 'pull',
+    'getbundle': 'pull',
+    'getlfile': 'pull',
+    'heads': 'pull',
+    'hello': 'pull',
+    'known': 'pull',
+    'lheads': 'pull',
+    'listkeys': 'pull',
+    'lookup': 'pull',
+    'manifestdata': 'pull',
+    'narrow_widen': 'pull',
+    'protocaps': 'pull',
+    'statlfile': 'pull',
+    'stream_out': 'pull',
+    'pushkey': 'push',
+    'putlfile': 'push',
+    'unbundle': 'push',
+    }
+
+
+class SimpleHg(base.BaseVCSController):
+
+    scm_alias = 'hg'
+
+    @classmethod
+    def parse_request(cls, environ):
+        http_accept = environ.get('HTTP_ACCEPT', '')
+        if not http_accept.startswith('application/mercurial'):
+            return None
+        path_info = base.get_path_info(environ)
+        if not path_info.startswith('/'): # it must!
+            return None
+
+        class parsed_request(object):
+            repo_name = path_info[1:].rstrip('/')
+
+            query_string = environ['QUERY_STRING']
+
+            action = None
+            for qry in query_string.split('&'):
+                parts = qry.split('=', 1)
+                if len(parts) == 2 and parts[0] == 'cmd':
+                    cmd = parts[1]
+                    if cmd == 'batch':
+                        hgarg = get_header_hgarg(environ)
+                        if not hgarg.startswith('cmds='):
+                            action = 'push' # paranoid and safe
+                            break
+                        action = 'pull'
+                        for cmd_arg in hgarg[5:].split(';'):
+                            cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
+                            op = cmd_mapping.get(cmd, 'push')
+                            if op != 'pull':
+                                assert op == 'push'
+                                action = 'push'
+                                break
+                    else:
+                        action = cmd_mapping.get(cmd, 'push')
+                    break # only process one cmd
+
+        return parsed_request
+
+    def _make_app(self, parsed_request):
+        """
+        Make an hgweb wsgi application.
+        """
+        repo_name = parsed_request.repo_name
+        repo_path = os.path.join(self.basepath, repo_name)
+        baseui = make_ui(repo_path=repo_path)
+        hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
+
+        def wrapper_app(environ, start_response):
+            environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb
+            return hgweb_app(environ, start_response)
+
+        return wrapper_app
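
Most Mercurial wire-protocol commands map one-to-one through cmd_mapping, but a 'batch' request bundles several sub-commands into the X-HgArg-N headers, so parse_request() above only reports 'pull' when every sub-command is a known pull command and conservatively falls back to 'push' otherwise. A self-contained sketch of that rule, using an illustrative subset of the pull commands:

    import urllib.parse

    pull_cmds = {'heads', 'known', 'listkeys', 'lookup', 'branchmap'}  # illustrative subset

    def classify_batch(hgarg):
        """Classify a decoded X-HgArg value of a cmd=batch request as pull or push."""
        if not hgarg.startswith('cmds='):
            return 'push'  # paranoid and safe, as in parse_request() above
        for cmd_arg in hgarg[5:].split(';'):
            cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
            if cmd not in pull_cmds:
                return 'push'
        return 'pull'

    assert classify_batch('cmds=heads ;known nodes=') == 'pull'
    assert classify_batch('cmds=heads ;pushkey key=x') == 'push'
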
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/config/middleware/wrapper.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.config.middleware.wrapper
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Wrap app to measure request and response time ... all the way until the response
+WSGI iterator has been closed.
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: May 23, 2013
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+import logging
+import time
+
+from kallithea.controllers import base
+
+
+log = logging.getLogger(__name__)
+
+
+class Meter:
+
+    def __init__(self, start_response):
+        self._start_response = start_response
+        self._start = time.time()
+        self.status = None
+        self._size = 0
+
+    def duration(self):
+        return time.time() - self._start
+
+    def start_response(self, status, response_headers, exc_info=None):
+        self.status = status
+        write = self._start_response(status, response_headers, exc_info)
+        def metered_write(s):
+            self.measure(s)
+            write(s)
+        return metered_write
+
+    def measure(self, chunk):
+        self._size += len(chunk)
+
+    def size(self):
+        return self._size
+
+
+class ResultIter:
+
+    def __init__(self, result, meter, description):
+        self._result_close = getattr(result, 'close', None) or (lambda: None)
+        self._next = iter(result).__next__
+        self._meter = meter
+        self._description = description
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        chunk = self._next()
+        self._meter.measure(chunk)
+        return chunk
+
+    def close(self):
+        self._result_close()
+        log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size())
+
+
+class RequestWrapper(object):
+
+    def __init__(self, app, config):
+        self.application = app
+        self.config = config
+
+    def __call__(self, environ, start_response):
+        meter = Meter(start_response)
+        description = "Request from %s for %s" % (
+            base.get_ip_addr(environ),
+            base.get_path_info(environ),
+        )
+        log.info("%s received", description)
+        try:
+            result = self.application(environ, meter.start_response)
+        finally:
+            log.info("%s responding %r after %.3fs", description, meter.status, meter.duration())
+        return ResultIter(result, meter, description)
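
RequestWrapper logs three times per request: on arrival, when the wrapped application returns, and, via ResultIter.close(), once the response iterator has been fully consumed, so streamed hgweb/pygrack responses are timed and sized to completion. A hypothetical usage sketch:

    from kallithea.config.middleware.wrapper import RequestWrapper

    def slow_app(environ, start_response):
        # a streaming WSGI app; each yielded chunk is counted by Meter.measure()
        start_response('200 OK', [('Content-Type', 'text/plain')])
        yield b'chunk 1\n'
        yield b'chunk 2\n'

    # config is stored but not consulted by the wrapper itself; {} stands in
    # for the real application config here.
    app = RequestWrapper(slow_app, config={})
    # Logged (schematically): "Request from <ip> for <path> received",
    # "... responding ... after 0.00Xs" when slow_app returns, and
    # "... responded '200 OK' after 0.00Xs with 16 bytes" once the iterator is closed.
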
--- a/kallithea/config/post_receive_tmpl.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-"""Kallithea Git hook
-
-This hook is installed and maintained by Kallithea. It will be overwritten
-by Kallithea - don't customize it manually!
-
-When Kallithea invokes Git, the KALLITHEA_EXTRAS environment variable will
-contain additional info like the Kallithea instance and user info that this
-hook will use.
-"""
-
-import os
-import sys
-
-import kallithea.lib.hooks
-
-
-# Set output mode on windows to binary for stderr.
-# This prevents python (or the windows console) from replacing \n with \r\n.
-# Git doesn't display remote output lines that contain \r,
-# and therefore without this modification git would display empty lines
-# instead of the exception output.
-if sys.platform == "win32":
-    import msvcrt
-    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
-
-KALLITHEA_HOOK_VER = '_TMPL_'
-os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER
-
-
-def main():
-    repo_path = os.path.abspath('.')
-    git_stdin_lines = sys.stdin.readlines()
-    sys.exit(kallithea.lib.hooks.handle_git_post_receive(repo_path, git_stdin_lines))
-
-
-if __name__ == '__main__':
-    main()
--- a/kallithea/config/pre_receive_tmpl.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-"""Kallithea Git hook
-
-This hook is installed and maintained by Kallithea. It will be overwritten
-by Kallithea - don't customize it manually!
-
-When Kallithea invokes Git, the KALLITHEA_EXTRAS environment variable will
-contain additional info like the Kallithea instance and user info that this
-hook will use.
-"""
-
-import os
-import sys
-
-import kallithea.lib.hooks
-
-
-# Set output mode on windows to binary for stderr.
-# This prevents python (or the windows console) from replacing \n with \r\n.
-# Git doesn't display remote output lines that contain \r,
-# and therefore without this modification git would display empty lines
-# instead of the exception output.
-if sys.platform == "win32":
-    import msvcrt
-    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
-
-KALLITHEA_HOOK_VER = '_TMPL_'
-os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER
-
-
-def main():
-    repo_path = os.path.abspath('.')
-    git_stdin_lines = sys.stdin.readlines()
-    sys.exit(kallithea.lib.hooks.handle_git_pre_receive(repo_path, git_stdin_lines))
-
-
-if __name__ == '__main__':
-    main()
--- a/kallithea/config/rcextensions/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,225 +0,0 @@
-# Additional mappings that are not present in the pygments lexers
-# used for building stats
-# format is {'ext':['Names']} eg. {'py':['Python']} note: there can be
-# more than one name for extension
-# NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP
-# build by pygments
-EXTRA_MAPPINGS = {}
-
-# additional lexer definitions for custom files
-# it's overrides pygments lexers, and uses defined name of lexer to colorize the
-# files. Format is {'ext': 'lexer_name'}
-# List of lexers can be printed running:
-# python -c "import pprint;from pygments import lexers;pprint.pprint([(x[0], x[1]) for x in lexers.get_all_lexers()]);"
-
-EXTRA_LEXERS = {}
-
-#==============================================================================
-# WHOOSH INDEX EXTENSIONS
-#==============================================================================
-# if INDEX_EXTENSIONS is [] it'll use pygments lexers extensions by default.
-# To set your own just add to this list extensions to index with content
-INDEX_EXTENSIONS = []
-
-# additional extensions for indexing besides the default from pygments
-# those gets added to INDEX_EXTENSIONS
-EXTRA_INDEX_EXTENSIONS = []
-
-
-#==============================================================================
-# POST CREATE REPOSITORY HOOK
-#==============================================================================
-# this function will be executed after each repository is created
-def _crrepohook(*args, **kwargs):
-    """
-    Post create repository HOOK
-    kwargs available:
-     :param repo_name:
-     :param repo_type:
-     :param description:
-     :param private:
-     :param created_on:
-     :param enable_downloads:
-     :param repo_id:
-     :param owner_id:
-     :param enable_statistics:
-     :param clone_uri:
-     :param fork_id:
-     :param group_id:
-     :param created_by:
-    """
-    return 0
-
-
-CREATE_REPO_HOOK = _crrepohook
-
-
-#==============================================================================
-# PRE CREATE USER HOOK
-#==============================================================================
-# this function will be executed before each user is created
-def _pre_cruserhook(*args, **kwargs):
-    """
-    Pre create user HOOK, it returns a tuple of bool, reason.
-    If bool is False the user creation will be stopped and reason
-    will be displayed to the user.
-    kwargs available:
-    :param username:
-    :param password:
-    :param email:
-    :param firstname:
-    :param lastname:
-    :param active:
-    :param admin:
-    :param created_by:
-    """
-    reason = 'allowed'
-    return True, reason
-
-
-PRE_CREATE_USER_HOOK = _pre_cruserhook
-
-#==============================================================================
-# POST CREATE USER HOOK
-#==============================================================================
-# this function will be executed after each user is created
-def _cruserhook(*args, **kwargs):
-    """
-    Post create user HOOK
-    kwargs available:
-      :param username:
-      :param full_name_or_username:
-      :param full_contact:
-      :param user_id:
-      :param name:
-      :param firstname:
-      :param short_contact:
-      :param admin:
-      :param lastname:
-      :param ip_addresses:
-      :param ldap_dn:
-      :param email:
-      :param api_key:
-      :param last_login:
-      :param full_name:
-      :param active:
-      :param password:
-      :param emails:
-      :param created_by:
-    """
-    return 0
-
-
-CREATE_USER_HOOK = _cruserhook
-
-
-#==============================================================================
-# POST DELETE REPOSITORY HOOK
-#==============================================================================
-# this function will be executed after each repository deletion
-def _dlrepohook(*args, **kwargs):
-    """
-    Post delete repository HOOK
-    kwargs available:
-     :param repo_name:
-     :param repo_type:
-     :param description:
-     :param private:
-     :param created_on:
-     :param enable_downloads:
-     :param repo_id:
-     :param owner_id:
-     :param enable_statistics:
-     :param clone_uri:
-     :param fork_id:
-     :param group_id:
-     :param deleted_by:
-     :param deleted_on:
-    """
-    return 0
-
-
-DELETE_REPO_HOOK = _dlrepohook
-
-
-#==============================================================================
-# POST DELETE USER HOOK
-#==============================================================================
-# this function will be executed after each user is deleted
-def _dluserhook(*args, **kwargs):
-    """
-    Post delete user HOOK
-    kwargs available:
-      :param username:
-      :param full_name_or_username:
-      :param full_contact:
-      :param user_id:
-      :param name:
-      :param firstname:
-      :param short_contact:
-      :param admin:
-      :param lastname:
-      :param ip_addresses:
-      :param ldap_dn:
-      :param email:
-      :param api_key:
-      :param last_login:
-      :param full_name:
-      :param active:
-      :param password:
-      :param emails:
-      :param deleted_by:
-    """
-    return 0
-
-
-DELETE_USER_HOOK = _dluserhook
-
-
-#==============================================================================
-# POST PUSH HOOK
-#==============================================================================
-
-# this function will be executed after each push it's executed after the
-# build-in hook that Kallithea uses for logging pushes
-def _pushhook(*args, **kwargs):
-    """
-    Post push hook
-    kwargs available:
-
-      :param config: path to .ini config used
-      :param scm: type of VS 'git' or 'hg'
-      :param username: name of user who pushed
-      :param ip: ip of who pushed
-      :param action: push
-      :param repository: repository name
-      :param pushed_revs: list of pushed revisions
-    """
-    return 0
-
-
-PUSH_HOOK = _pushhook
-
-
-#==============================================================================
-# POST PULL HOOK
-#==============================================================================
-
-# this function will be executed after each push it's executed after the
-# build-in hook that Kallithea uses for logging pulls
-def _pullhook(*args, **kwargs):
-    """
-    Post pull hook
-    kwargs available::
-
-      :param config: path to .ini config used
-      :param scm: type of VS 'git' or 'hg'
-      :param username: name of user who pulled
-      :param ip: ip of who pulled
-      :param action: pull
-      :param repository: repository name
-    """
-    return 0
-
-
-PULL_HOOK = _pullhook
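
For reference, the module removed above defined Kallithea's old rcextensions interface: a site customized behaviour by replacing these module-level callables. An illustrative override of the push hook, using only the kwargs documented in the deleted file:

    def _pushhook(*args, **kwargs):
        # runs after Kallithea's built-in push logging; return 0 for success
        print('push to %(repository)s by %(username)s from %(ip)s: %(pushed_revs)s'
              % kwargs)
        return 0

    PUSH_HOOK = _pushhook
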
--- a/kallithea/config/routing.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,803 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-Routes configuration
-
-The more specific and detailed routes should be defined first so they
-may take precedent over the more generic routes. For more information
-refer to the routes manual at http://routes.groovie.org/docs/
-"""
-
-import routes
-from tg import request
-
-from kallithea.lib.utils2 import safe_str
-
-
-# prefix for non repository related links needs to be prefixed with `/`
-ADMIN_PREFIX = '/_admin'
-
-
-class Mapper(routes.Mapper):
-    """
-    Subclassed Mapper with routematch patched to decode "unicode" str url to
-    *real* unicode str before applying matches and invoking controller methods.
-    """
-
-    def routematch(self, url=None, environ=None):
-        """
-        routematch that also decode url from "fake bytes" to real unicode
-        string before matching and invoking controllers.
-        """
-        # Process url like get_path_info does ... but PATH_INFO has already
-        # been retrieved from environ and is passed, so - let's just use that
-        # instead.
-        url = safe_str(url.encode('latin1'))
-        return super().routematch(url=url, environ=environ)
-
-
-def make_map(config):
-    """Create, configure and return the routes Mapper"""
-    rmap = Mapper(directory=config['paths']['controllers'],
-                  always_scan=config['debug'])
-    rmap.minimization = False
-    rmap.explicit = False
-
-    from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
-
-    def check_repo(environ, match_dict):
-        """
-        Check for valid repository for proper 404 handling.
-        Also, a bit of side effect modifying match_dict ...
-        """
-        if match_dict.get('f_path'):
-            # fix for multiple initial slashes that causes errors
-            match_dict['f_path'] = match_dict['f_path'].lstrip('/')
-
-        return is_valid_repo(match_dict['repo_name'], config['base_path'])
-
-    def check_group(environ, match_dict):
-        """
-        check for valid repository group for proper 404 handling
-
-        :param environ:
-        :param match_dict:
-        """
-        repo_group_name = match_dict.get('group_name')
-        return is_valid_repo_group(repo_group_name, config['base_path'])
-
-    def check_group_skip_path(environ, match_dict):
-        """
-        check for valid repository group for proper 404 handling, but skips
-        verification of existing path
-
-        :param environ:
-        :param match_dict:
-        """
-        repo_group_name = match_dict.get('group_name')
-        return is_valid_repo_group(repo_group_name, config['base_path'],
-                                   skip_path_check=True)
-
-    def check_user_group(environ, match_dict):
-        """
-        check for valid user group for proper 404 handling
-
-        :param environ:
-        :param match_dict:
-        """
-        return True
-
-    def check_int(environ, match_dict):
-        return match_dict.get('id').isdigit()
-
-    #==========================================================================
-    # CUSTOM ROUTES HERE
-    #==========================================================================
-
-    # MAIN PAGE
-    rmap.connect('home', '/', controller='home')
-    rmap.connect('about', '/about', controller='home', action='about')
-    rmap.redirect('/favicon.ico', '/images/favicon.ico')
-    rmap.connect('repo_switcher_data', '/_repos', controller='home',
-                 action='repo_switcher_data')
-    rmap.connect('users_and_groups_data', '/_users_and_groups', controller='home',
-                 action='users_and_groups_data')
-
-    rmap.connect('rst_help',
-                 "http://docutils.sourceforge.net/docs/user/rst/quickref.html",
-                 _static=True)
-    rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True)
-    rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True)
-
-    # ADMIN REPOSITORY ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/repos') as m:
-        m.connect("repos", "/repos",
-                  action="create", conditions=dict(method=["POST"]))
-        m.connect("repos", "/repos",
-                  conditions=dict(method=["GET"]))
-        m.connect("new_repo", "/create_repository",
-                  action="create_repository", conditions=dict(method=["GET"]))
-        m.connect("update_repo", "/repos/{repo_name:.*?}",
-                  action="update", conditions=dict(method=["POST"],
-                  function=check_repo))
-        m.connect("delete_repo", "/repos/{repo_name:.*?}/delete",
-                  action="delete", conditions=dict(method=["POST"]))
-
-    # ADMIN REPOSITORY GROUPS ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/repo_groups') as m:
-        m.connect("repos_groups", "/repo_groups",
-                  action="create", conditions=dict(method=["POST"]))
-        m.connect("repos_groups", "/repo_groups",
-                  conditions=dict(method=["GET"]))
-        m.connect("new_repos_group", "/repo_groups/new",
-                  action="new", conditions=dict(method=["GET"]))
-        m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
-                  action="update", conditions=dict(method=["POST"],
-                                                   function=check_group))
-
-        m.connect("repos_group", "/repo_groups/{group_name:.*?}",
-                  action="show", conditions=dict(method=["GET"],
-                                                 function=check_group))
-
-        # EXTRAS REPO GROUP ROUTES
-        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
-                  action="edit",
-                  conditions=dict(method=["GET"], function=check_group))
-
-        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
-                  action="edit_repo_group_advanced",
-                  conditions=dict(method=["GET"], function=check_group))
-
-        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
-                  action="edit_repo_group_perms",
-                  conditions=dict(method=["GET"], function=check_group))
-        m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions",
-                  action="update_perms",
-                  conditions=dict(method=["POST"], function=check_group))
-        m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete",
-                  action="delete_perms",
-                  conditions=dict(method=["POST"], function=check_group))
-
-        m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete",
-                  action="delete", conditions=dict(method=["POST"],
-                                                   function=check_group_skip_path))
-
-    # ADMIN USER ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/users') as m:
-        m.connect("new_user", "/users/new",
-                  action="create", conditions=dict(method=["POST"]))
-        m.connect("users", "/users",
-                  conditions=dict(method=["GET"]))
-        m.connect("formatted_users", "/users.{format}",
-                  conditions=dict(method=["GET"]))
-        m.connect("new_user", "/users/new",
-                  action="new", conditions=dict(method=["GET"]))
-        m.connect("update_user", "/users/{id}",
-                  action="update", conditions=dict(method=["POST"]))
-        m.connect("delete_user", "/users/{id}/delete",
-                  action="delete", conditions=dict(method=["POST"]))
-        m.connect("edit_user", "/users/{id}/edit",
-                  action="edit", conditions=dict(method=["GET"]))
-
-        # EXTRAS USER ROUTES
-        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
-                  action="edit_advanced", conditions=dict(method=["GET"]))
-
-        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
-                  action="edit_api_keys", conditions=dict(method=["GET"]))
-        m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys",
-                  action="add_api_key", conditions=dict(method=["POST"]))
-        m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete",
-                  action="delete_api_key", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys",
-                  action="edit_ssh_keys", conditions=dict(method=["GET"]))
-        m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys",
-                  action="ssh_keys_add", conditions=dict(method=["POST"]))
-        m.connect("edit_user_ssh_keys_delete", "/users/{id}/edit/ssh_keys/delete",
-                  action="ssh_keys_delete", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
-                  action="edit_perms", conditions=dict(method=["GET"]))
-        m.connect("edit_user_perms_update", "/users/{id}/edit/permissions",
-                  action="update_perms", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_emails", "/users/{id}/edit/emails",
-                  action="edit_emails", conditions=dict(method=["GET"]))
-        m.connect("edit_user_emails_update", "/users/{id}/edit/emails",
-                  action="add_email", conditions=dict(method=["POST"]))
-        m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete",
-                  action="delete_email", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_ips", "/users/{id}/edit/ips",
-                  action="edit_ips", conditions=dict(method=["GET"]))
-        m.connect("edit_user_ips_update", "/users/{id}/edit/ips",
-                  action="add_ip", conditions=dict(method=["POST"]))
-        m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete",
-                  action="delete_ip", conditions=dict(method=["POST"]))
-
-    # ADMIN USER GROUPS REST ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/user_groups') as m:
-        m.connect("users_groups", "/user_groups",
-                  action="create", conditions=dict(method=["POST"]))
-        m.connect("users_groups", "/user_groups",
-                  conditions=dict(method=["GET"]))
-        m.connect("new_users_group", "/user_groups/new",
-                  action="new", conditions=dict(method=["GET"]))
-        m.connect("update_users_group", "/user_groups/{id}",
-                  action="update", conditions=dict(method=["POST"]))
-        m.connect("delete_users_group", "/user_groups/{id}/delete",
-                  action="delete", conditions=dict(method=["POST"]))
-        m.connect("edit_users_group", "/user_groups/{id}/edit",
-                  action="edit", conditions=dict(method=["GET"]),
-                  function=check_user_group)
-
-        # EXTRAS USER GROUP ROUTES
-        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
-                  action="edit_default_perms", conditions=dict(method=["GET"]))
-        m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms",
-                  action="update_default_perms", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
-                  action="edit_perms", conditions=dict(method=["GET"]))
-        m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms",
-                  action="update_perms", conditions=dict(method=["POST"]))
-        m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete",
-                  action="delete_perms", conditions=dict(method=["POST"]))
-
-        m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced",
-                  action="edit_advanced", conditions=dict(method=["GET"]))
-
-        m.connect("edit_user_group_members", "/user_groups/{id}/edit/members",
-                  action="edit_members", conditions=dict(method=["GET"]))
-
-    # ADMIN PERMISSIONS ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/permissions') as m:
-        m.connect("admin_permissions", "/permissions",
-                  action="permission_globals", conditions=dict(method=["POST"]))
-        m.connect("admin_permissions", "/permissions",
-                  action="permission_globals", conditions=dict(method=["GET"]))
-
-        m.connect("admin_permissions_ips", "/permissions/ips",
-                  action="permission_ips", conditions=dict(method=["GET"]))
-
-        m.connect("admin_permissions_perms", "/permissions/perms",
-                  action="permission_perms", conditions=dict(method=["GET"]))
-
-    # ADMIN DEFAULTS ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/defaults') as m:
-        m.connect('defaults', '/defaults')
-        m.connect('defaults_update', 'defaults/{id}/update',
-                  action="update", conditions=dict(method=["POST"]))
-
-    # ADMIN AUTH SETTINGS
-    rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX,
-                 controller='admin/auth_settings', action='auth_settings',
-                 conditions=dict(method=["POST"]))
-    rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX,
-                 controller='admin/auth_settings')
-
-    # ADMIN SETTINGS ROUTES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/settings') as m:
-        m.connect("admin_settings", "/settings",
-                  action="settings_vcs", conditions=dict(method=["POST"]))
-        m.connect("admin_settings", "/settings",
-                  action="settings_vcs", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_mapping", "/settings/mapping",
-                  action="settings_mapping", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_mapping", "/settings/mapping",
-                  action="settings_mapping", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_global", "/settings/global",
-                  action="settings_global", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_global", "/settings/global",
-                  action="settings_global", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_visual", "/settings/visual",
-                  action="settings_visual", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_visual", "/settings/visual",
-                  action="settings_visual", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_email", "/settings/email",
-                  action="settings_email", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_email", "/settings/email",
-                  action="settings_email", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_hooks", "/settings/hooks",
-                  action="settings_hooks", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_hooks_delete", "/settings/hooks/delete",
-                  action="settings_hooks", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_hooks", "/settings/hooks",
-                  action="settings_hooks", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_search", "/settings/search",
-                  action="settings_search", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_search", "/settings/search",
-                  action="settings_search", conditions=dict(method=["GET"]))
-
-        m.connect("admin_settings_system", "/settings/system",
-                  action="settings_system", conditions=dict(method=["POST"]))
-        m.connect("admin_settings_system", "/settings/system",
-                  action="settings_system", conditions=dict(method=["GET"]))
-
-    # ADMIN MY ACCOUNT
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/my_account') as m:
-
-        m.connect("my_account", "/my_account",
-                  action="my_account", conditions=dict(method=["GET"]))
-        m.connect("my_account", "/my_account",
-                  action="my_account", conditions=dict(method=["POST"]))
-
-        m.connect("my_account_password", "/my_account/password",
-                  action="my_account_password", conditions=dict(method=["GET"]))
-        m.connect("my_account_password", "/my_account/password",
-                  action="my_account_password", conditions=dict(method=["POST"]))
-
-        m.connect("my_account_repos", "/my_account/repos",
-                  action="my_account_repos", conditions=dict(method=["GET"]))
-
-        m.connect("my_account_watched", "/my_account/watched",
-                  action="my_account_watched", conditions=dict(method=["GET"]))
-
-        m.connect("my_account_perms", "/my_account/perms",
-                  action="my_account_perms", conditions=dict(method=["GET"]))
-
-        m.connect("my_account_emails", "/my_account/emails",
-                  action="my_account_emails", conditions=dict(method=["GET"]))
-        m.connect("my_account_emails", "/my_account/emails",
-                  action="my_account_emails_add", conditions=dict(method=["POST"]))
-        m.connect("my_account_emails_delete", "/my_account/emails/delete",
-                  action="my_account_emails_delete", conditions=dict(method=["POST"]))
-
-        m.connect("my_account_api_keys", "/my_account/api_keys",
-                  action="my_account_api_keys", conditions=dict(method=["GET"]))
-        m.connect("my_account_api_keys", "/my_account/api_keys",
-                  action="my_account_api_keys_add", conditions=dict(method=["POST"]))
-        m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete",
-                  action="my_account_api_keys_delete", conditions=dict(method=["POST"]))
-
-        m.connect("my_account_ssh_keys", "/my_account/ssh_keys",
-                  action="my_account_ssh_keys", conditions=dict(method=["GET"]))
-        m.connect("my_account_ssh_keys", "/my_account/ssh_keys",
-                  action="my_account_ssh_keys_add", conditions=dict(method=["POST"]))
-        m.connect("my_account_ssh_keys_delete", "/my_account/ssh_keys/delete",
-                  action="my_account_ssh_keys_delete", conditions=dict(method=["POST"]))
-
-    # ADMIN GIST
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/gists') as m:
-        m.connect("gists", "/gists",
-                  action="create", conditions=dict(method=["POST"]))
-        m.connect("gists", "/gists",
-                  conditions=dict(method=["GET"]))
-        m.connect("new_gist", "/gists/new",
-                  action="new", conditions=dict(method=["GET"]))
-
-        m.connect("gist_delete", "/gists/{gist_id}/delete",
-                  action="delete", conditions=dict(method=["POST"]))
-        m.connect("edit_gist", "/gists/{gist_id}/edit",
-                  action="edit", conditions=dict(method=["GET", "POST"]))
-        m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision",
-                  action="check_revision", conditions=dict(method=["POST"]))
-
-        m.connect("gist", "/gists/{gist_id}",
-                  action="show", conditions=dict(method=["GET"]))
-        m.connect("gist_rev", "/gists/{gist_id}/{revision}",
-                  revision="tip",
-                  action="show", conditions=dict(method=["GET"]))
-        m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}",
-                  revision="tip",
-                  action="show", conditions=dict(method=["GET"]))
-        m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}",
-                  revision='tip',
-                  action="show", conditions=dict(method=["GET"]))
-
-    # ADMIN MAIN PAGES
-    with rmap.submapper(path_prefix=ADMIN_PREFIX,
-                        controller='admin/admin') as m:
-        m.connect('admin_home', '')
-        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. _-]*}',
-                  action='add_repo')
-    #==========================================================================
-    # API V2
-    #==========================================================================
-    with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api',
-                        action='_dispatch') as m:
-        m.connect('api', '/api')
-
-    # USER JOURNAL
-    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
-                 controller='journal')
-    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
-                 controller='journal', action='journal_rss')
-    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
-                 controller='journal', action='journal_atom')
-
-    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
-                 controller='journal', action="public_journal")
-
-    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
-                 controller='journal', action="public_journal_rss")
-
-    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
-                 controller='journal', action="public_journal_rss")
-
-    rmap.connect('public_journal_atom',
-                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
-                 action="public_journal_atom")
-
-    rmap.connect('public_journal_atom_old',
-                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
-                 action="public_journal_atom")
-
-    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
-                 controller='journal', action='toggle_following',
-                 conditions=dict(method=["POST"]))
-
-    # SEARCH
-    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
-    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
-                 controller='search',
-                 conditions=dict(function=check_repo))
-    rmap.connect('search_repo', '/{repo_name:.*?}/search',
-                 controller='search',
-                 conditions=dict(function=check_repo),
-                 )
-
-    # LOGIN/LOGOUT/REGISTER/SIGN IN
-    rmap.connect('session_csrf_secret_token', '%s/session_csrf_secret_token' % ADMIN_PREFIX, controller='login', action='session_csrf_secret_token')
-    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
-    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
-                 action='logout')
-
-    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
-                 action='register')
-
-    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
-                 controller='login', action='password_reset')
-
-    rmap.connect('reset_password_confirmation',
-                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
-                 controller='login', action='password_reset_confirmation')
-
-    # FEEDS
-    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
-                controller='feed', action='rss',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
-                controller='feed', action='atom',
-                conditions=dict(function=check_repo))
-
-    #==========================================================================
-    # REPOSITORY ROUTES
-    #==========================================================================
-    rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
-                controller='admin/repos', action='repo_creating')
-    rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating',
-                controller='admin/repos', action='repo_check')
-
-    rmap.connect('summary_home', '/{repo_name:.*?}',
-                controller='summary',
-                conditions=dict(function=check_repo))
-
-    # must be here for proper group/repo catching
-    rmap.connect('repos_group_home', '/{group_name:.*}',
-                controller='admin/repo_groups', action="show_by_name",
-                conditions=dict(function=check_group))
-    rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics',
-                controller='summary', action='statistics',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('repo_size', '/{repo_name:.*?}/repo_size',
-                controller='summary', action='repo_size',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data',
-                 controller='home', action='repo_refs_data')
-
-    rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}',
-                controller='changeset', revision='tip',
-                conditions=dict(function=check_repo))
-    rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}',
-                controller='changeset', revision='tip', action="changeset_children",
-                conditions=dict(function=check_repo))
-    rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}',
-                controller='changeset', revision='tip', action="changeset_parents",
-                conditions=dict(function=check_repo))
-
-    # repo edit options
-    rmap.connect("edit_repo", "/{repo_name:.*?}/settings",
-                 controller='admin/repos', action="edit",
-                 conditions=dict(method=["GET"], function=check_repo))
-
-    rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions",
-                 controller='admin/repos', action="edit_permissions",
-                 conditions=dict(method=["GET"], function=check_repo))
-    rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions",
-                 controller='admin/repos', action="edit_permissions_update",
-                 conditions=dict(method=["POST"], function=check_repo))
-    rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete",
-                 controller='admin/repos', action="edit_permissions_revoke",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields",
-                 controller='admin/repos', action="edit_fields",
-                 conditions=dict(method=["GET"], function=check_repo))
-    rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new",
-                 controller='admin/repos', action="create_repo_field",
-                 conditions=dict(method=["POST"], function=check_repo))
-    rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete",
-                 controller='admin/repos', action="delete_repo_field",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced",
-                 controller='admin/repos', action="edit_advanced",
-                 conditions=dict(method=["GET"], function=check_repo))
-
-    rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal",
-                 controller='admin/repos', action="edit_advanced_journal",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork",
-                 controller='admin/repos', action="edit_advanced_fork",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
-                 controller='admin/repos', action="edit_remote",
-                 conditions=dict(method=["GET"], function=check_repo))
-    rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote",
-                 controller='admin/repos', action="edit_remote",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
-                 controller='admin/repos', action="edit_statistics",
-                 conditions=dict(method=["GET"], function=check_repo))
-    rmap.connect("edit_repo_statistics_update", "/{repo_name:.*?}/settings/statistics",
-                 controller='admin/repos', action="edit_statistics",
-                 conditions=dict(method=["POST"], function=check_repo))
-
-    # still working url for backward compat.
-    rmap.connect('raw_changeset_home_depraced',
-                 '/{repo_name:.*?}/raw-changeset/{revision}',
-                 controller='changeset', action='changeset_raw',
-                 revision='tip', conditions=dict(function=check_repo))
-
-    ## new URLs
-    rmap.connect('changeset_raw_home',
-                 '/{repo_name:.*?}/changeset-diff/{revision}',
-                 controller='changeset', action='changeset_raw',
-                 revision='tip', conditions=dict(function=check_repo))
-
-    rmap.connect('changeset_patch_home',
-                 '/{repo_name:.*?}/changeset-patch/{revision}',
-                 controller='changeset', action='changeset_patch',
-                 revision='tip', conditions=dict(function=check_repo))
-
-    rmap.connect('changeset_download_home',
-                 '/{repo_name:.*?}/changeset-download/{revision}',
-                 controller='changeset', action='changeset_download',
-                 revision='tip', conditions=dict(function=check_repo))
-
-    rmap.connect('changeset_comment',
-                 '/{repo_name:.*?}/changeset-comment/{revision}',
-                controller='changeset', revision='tip', action='comment',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('changeset_comment_delete',
-                 '/{repo_name:.*?}/changeset-comment/{comment_id}/delete',
-                controller='changeset', action='delete_comment',
-                conditions=dict(function=check_repo, method=["POST"]))
-
-    rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}',
-                 controller='changeset', action='changeset_info')
-
-    rmap.connect('compare_home',
-                 '/{repo_name:.*?}/compare',
-                 controller='compare',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('compare_url',
-                 '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}',
-                 controller='compare', action='compare',
-                 conditions=dict(function=check_repo),
-                 requirements=dict(
-                            org_ref_type='(branch|book|tag|rev|__other_ref_type__)',
-                            other_ref_type='(branch|book|tag|rev|__org_ref_type__)')
-                 )
-
-    rmap.connect('pullrequest_home',
-                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
-                 conditions=dict(function=check_repo,
-                                                 method=["GET"]))
-
-    rmap.connect('pullrequest_repo_info',
-                 '/{repo_name:.*?}/pull-request-repo-info',
-                 controller='pullrequests', action='repo_info',
-                 conditions=dict(function=check_repo, method=["GET"]))
-
-    rmap.connect('pullrequest',
-                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
-                 action='create', conditions=dict(function=check_repo,
-                                                  method=["POST"]))
-
-    rmap.connect('pullrequest_show',
-                 '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='',
-                 controller='pullrequests',
-                 action='show', conditions=dict(function=check_repo,
-                                                method=["GET"]))
-    rmap.connect('pullrequest_post',
-                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
-                 controller='pullrequests',
-                 action='post', conditions=dict(function=check_repo,
-                                                method=["POST"]))
-    rmap.connect('pullrequest_delete',
-                 '/{repo_name:.*?}/pull-request/{pull_request_id}/delete',
-                 controller='pullrequests',
-                 action='delete', conditions=dict(function=check_repo,
-                                                  method=["POST"]))
-
-    rmap.connect('pullrequest_show_all',
-                 '/{repo_name:.*?}/pull-request',
-                 controller='pullrequests',
-                 action='show_all', conditions=dict(function=check_repo,
-                                                method=["GET"]))
-
-    rmap.connect('my_pullrequests',
-                 '/my_pullrequests',
-                 controller='pullrequests',
-                 action='show_my', conditions=dict(method=["GET"]))
-
-    rmap.connect('pullrequest_comment',
-                 '/{repo_name:.*?}/pull-request-comment/{pull_request_id}',
-                 controller='pullrequests',
-                 action='comment', conditions=dict(function=check_repo,
-                                                method=["POST"]))
-
-    rmap.connect('pullrequest_comment_delete',
-                 '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete',
-                controller='pullrequests', action='delete_comment',
-                conditions=dict(function=check_repo, method=["POST"]))
-
-    rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary',
-                controller='summary', conditions=dict(function=check_repo))
-
-    rmap.connect('changelog_home', '/{repo_name:.*?}/changelog',
-                controller='changelog', conditions=dict(function=check_repo))
-
-    rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
-                controller='changelog',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
-                controller='changelog', action='changelog_details',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}',
-                controller='files', revision='tip', f_path='',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}',
-                controller='files', revision='tip', f_path='',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_history_home',
-                 '/{repo_name:.*?}/history/{revision}/{f_path:.*}',
-                 controller='files', action='history', revision='tip', f_path='',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('files_authors_home',
-                 '/{repo_name:.*?}/authors/{revision}/{f_path:.*}',
-                 controller='files', action='authors', revision='tip', f_path='',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}',
-                controller='files', action='diff', revision='tip', f_path='',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}',
-                controller='files', action='diff_2way', revision='tip', f_path='',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_rawfile_home',
-                 '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}',
-                 controller='files', action='rawfile', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_raw_home',
-                 '/{repo_name:.*?}/raw/{revision}/{f_path:.*}',
-                 controller='files', action='raw', revision='tip', f_path='',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('files_annotate_home',
-                 '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
-                 controller='files', revision='tip',
-                 f_path='', annotate='1', conditions=dict(function=check_repo))
-
-    rmap.connect('files_edit_home',
-                 '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
-                 controller='files', action='edit', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_add_home',
-                 '/{repo_name:.*?}/add/{revision}/{f_path:.*}',
-                 controller='files', action='add', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_delete_home',
-                 '/{repo_name:.*?}/delete/{revision}/{f_path:.*}',
-                 controller='files', action='delete', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}',
-                controller='files', action='archivefile',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('files_nodelist_home',
-                 '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}',
-                controller='files', action='nodelist',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork',
-                controller='forks', action='fork_create',
-                conditions=dict(function=check_repo, method=["POST"]))
-
-    rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork',
-                controller='forks', action='fork',
-                conditions=dict(function=check_repo))
-
-    rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks',
-                 controller='forks', action='forks',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers',
-                 controller='followers', action='followers',
-                 conditions=dict(function=check_repo))
-
-    return rmap
-
-
-class UrlGenerator(object):
-    """Emulate pylons.url in providing a wrapper around routes.url
-
-    This code was added during migration from Pylons to Turbogears2. Pylons
-    already provided a wrapper like this, but Turbogears2 does not.
-
-    When the routing of Kallithea is changed to use less Routes and more
-    Turbogears2-style routing, this class may disappear or change.
-
-    url() (the __call__ method) returns the URL based on a route name and
-    arguments.
-    url.current() returns the URL of the current page with arguments applied.
-
-    Refer to documentation of Routes for details:
-    https://routes.readthedocs.io/en/latest/generating.html#generation
-    """
-    def __call__(self, *args, **kwargs):
-        return request.environ['routes.url'](*args, **kwargs)
-
-    def current(self, *args, **kwargs):
-        return request.environ['routes.url'].current(*args, **kwargs)
-
-
-url = UrlGenerator()
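
The docstring above sums up the wrapper being removed here: url() builds a URL from a Routes route name and keyword arguments, and url.current() returns the current page's URL with arguments applied. The controller hunks that follow keep exactly this call pattern but import url from kallithea.lib.webutils instead of kallithea.config.routing. A minimal usage sketch (not part of the changeset, and only meaningful inside an active Kallithea request context); the route names and arguments are copied from the surrounding hunks:

    # sketch only: assumes a live TurboGears request with a Routes mapper in the WSGI environ
    from kallithea.lib.webutils import url

    target = url('auth_home')                     # path for a named route
    gist_link = url('gist', gist_id=new_gist_id)  # route name plus arguments, as in gists.py below
    this_page = url.current(page=2)               # current page URL with 'page' applied
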
--- a/kallithea/controllers/admin/admin.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/admin.py	Thu May 27 21:27:37 2021 +0200
@@ -36,12 +36,12 @@
 from whoosh.qparser.dateparse import DateParserPlugin
 from whoosh.qparser.default import QueryParser
 
+from kallithea.controllers import base
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
 from kallithea.lib.indexers import JOURNAL_SCHEMA
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import remove_prefix, remove_suffix, safe_int
-from kallithea.model.db import UserLog
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -77,15 +77,15 @@
     def get_filterion(field, val, term):
 
         if field == 'repository':
-            field = getattr(UserLog, 'repository_name')
+            field = getattr(db.UserLog, 'repository_name')
         elif field == 'ip':
-            field = getattr(UserLog, 'user_ip')
+            field = getattr(db.UserLog, 'user_ip')
         elif field == 'date':
-            field = getattr(UserLog, 'action_date')
+            field = getattr(db.UserLog, 'action_date')
         elif field == 'username':
-            field = getattr(UserLog, 'username')
+            field = getattr(db.UserLog, 'username')
         else:
-            field = getattr(UserLog, field)
+            field = getattr(db.UserLog, field)
         log.debug('filter field: %s val=>%s', field, val)
 
         # sql filtering
@@ -102,6 +102,7 @@
         if not isinstance(qry, query.And):
             qry = [qry]
         for term in qry:
+            assert term is not None, term
             field = term.fieldname
             val = (term.text if not isinstance(term, query.DateRange)
                    else [term.startdate, term.enddate])
@@ -118,7 +119,7 @@
     return user_log
 
 
-class AdminController(BaseController):
+class AdminController(base.BaseController):
 
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
@@ -126,15 +127,15 @@
 
     @HasPermissionAnyDecorator('hg.admin')
     def index(self):
-        users_log = UserLog.query() \
-                .options(joinedload(UserLog.user)) \
-                .options(joinedload(UserLog.repository))
+        users_log = db.UserLog.query() \
+                .options(joinedload(db.UserLog.user)) \
+                .options(joinedload(db.UserLog.repository))
 
         # FILTERING
         c.search_term = request.GET.get('filter')
         users_log = _journal_filter(users_log, c.search_term)
 
-        users_log = users_log.order_by(UserLog.action_date.desc())
+        users_log = users_log.order_by(db.UserLog.action_date.desc())
 
         p = safe_int(request.GET.get('page'), 1)
 
@@ -142,6 +143,6 @@
                            filter=c.search_term)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('admin/admin_log.html')
+            return base.render('admin/admin_log.html')
 
-        return render('admin/admin.html')
+        return base.render('admin/admin.html')
--- a/kallithea/controllers/admin/auth_settings.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/auth_settings.py	Thu May 27 21:27:37 2021 +0200
@@ -32,20 +32,18 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
-from kallithea.lib import auth_modules
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import auth_modules, webutils
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
-from kallithea.model.db import Setting
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import AuthSettingsForm
-from kallithea.model.meta import Session
 
 
 log = logging.getLogger(__name__)
 
 
-class AuthSettingsController(BaseController):
+class AuthSettingsController(base.BaseController):
 
     @LoginRequired()
     @HasPermissionAnyDecorator('hg.admin')
@@ -77,7 +75,7 @@
                 if "default" in v:
                     c.defaults[fullname] = v["default"]
                 # Current values will be the default on the form, if there are any
-                setting = Setting.get_by_name(fullname)
+                setting = db.Setting.get_by_name(fullname)
                 if setting is not None:
                     c.defaults[fullname] = setting.app_settings_value
         if defaults:
@@ -88,7 +86,7 @@
 
         log.debug('defaults: %s', defaults)
         return formencode.htmlfill.render(
-            render('admin/auth/auth_settings.html'),
+            base.render('admin/auth/auth_settings.html'),
             defaults=c.defaults,
             errors=errors,
             prefix_error=False,
@@ -131,9 +129,9 @@
                     # we want to store it comma separated inside our settings
                     v = ','.join(v)
                 log.debug("%s = %s", k, str(v))
-                setting = Setting.create_or_update(k, v)
-            Session().commit()
-            h.flash(_('Auth settings updated successfully'),
+                setting = db.Setting.create_or_update(k, v)
+            meta.Session().commit()
+            webutils.flash(_('Auth settings updated successfully'),
                        category='success')
         except formencode.Invalid as errors:
             log.error(traceback.format_exc())
@@ -144,7 +142,7 @@
             )
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('error occurred during update of auth settings'),
+            webutils.flash(_('error occurred during update of auth settings'),
                     category='error')
 
         raise HTTPFound(location=url('auth_home'))
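
The auth_settings.py hunk above shows the import refactor that repeats through the remaining controller diffs: helper and model names are no longer imported individually but are referenced through their modules. A condensed sketch of the new-style imports and typical call sites, assembled from the hunks in this changeset rather than from any single file:

    from kallithea.controllers import base   # replaces kallithea.lib.base (BaseController, render, jsonify)
    from kallithea.lib import webutils       # replaces 'from kallithea.lib import helpers as h' for flash()
    from kallithea.lib.webutils import url   # replaces kallithea.config.routing.url
    from kallithea.model import db, meta     # replaces direct imports of Setting, Session, etc.

    # typical call sites after the refactor:
    #   webutils.flash(_('Auth settings updated successfully'), category='success')
    #   setting = db.Setting.create_or_update(k, v)
    #   meta.Session().commit()
    #   return base.render('admin/auth/auth_settings.html')
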
--- a/kallithea/controllers/admin/defaults.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/defaults.py	Thu May 27 21:27:37 2021 +0200
@@ -34,19 +34,18 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
-from kallithea.model.db import Setting
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import DefaultsForm
-from kallithea.model.meta import Session
 
 
 log = logging.getLogger(__name__)
 
 
-class DefaultsController(BaseController):
+class DefaultsController(base.BaseController):
 
     @LoginRequired()
     @HasPermissionAnyDecorator('hg.admin')
@@ -54,10 +53,10 @@
         super(DefaultsController, self)._before(*args, **kwargs)
 
     def index(self, format='html'):
-        defaults = Setting.get_default_repo_settings()
+        defaults = db.Setting.get_default_repo_settings()
 
         return htmlfill.render(
-            render('admin/defaults/defaults.html'),
+            base.render('admin/defaults/defaults.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -69,16 +68,16 @@
         try:
             form_result = _form.to_python(dict(request.POST))
             for k, v in form_result.items():
-                setting = Setting.create_or_update(k, v)
-            Session().commit()
-            h.flash(_('Default settings updated successfully'),
+                setting = db.Setting.create_or_update(k, v)
+            meta.Session().commit()
+            webutils.flash(_('Default settings updated successfully'),
                     category='success')
 
         except formencode.Invalid as errors:
             defaults = errors.value
 
             return htmlfill.render(
-                render('admin/defaults/defaults.html'),
+                base.render('admin/defaults/defaults.html'),
                 defaults=defaults,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -86,7 +85,7 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during update of defaults'),
+            webutils.flash(_('Error occurred during update of defaults'),
                     category='error')
 
         raise HTTPFound(location=url('defaults'))
--- a/kallithea/controllers/admin/gists.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/gists.py	Thu May 27 21:27:37 2021 +0200
@@ -35,24 +35,22 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPForbidden, HTTPFound, HTTPNotFound
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import auth, webutils
 from kallithea.lib.auth import LoginRequired
-from kallithea.lib.base import BaseController, jsonify, render
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int, safe_str, time_to_datetime
 from kallithea.lib.vcs.exceptions import NodeNotChangedError, VCSError
-from kallithea.model.db import Gist
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import GistForm
 from kallithea.model.gist import GistModel
-from kallithea.model.meta import Session
 
 
 log = logging.getLogger(__name__)
 
 
-class GistsController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
+class GistsController(base.BaseController):
 
     def __load_defaults(self, extra_values=None):
         c.lifetime_values = [
@@ -77,34 +75,34 @@
         elif c.show_private:
             url_params['private'] = 1
 
-        gists = Gist().query() \
+        gists = db.Gist().query() \
             .filter_by(is_expired=False) \
-            .order_by(Gist.created_on.desc())
+            .order_by(db.Gist.created_on.desc())
 
         # MY private
         if c.show_private and not c.show_public:
-            gists = gists.filter(Gist.gist_type == Gist.GIST_PRIVATE) \
-                             .filter(Gist.owner_id == request.authuser.user_id)
+            gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PRIVATE) \
+                             .filter(db.Gist.owner_id == request.authuser.user_id)
         # MY public
         elif c.show_public and not c.show_private:
-            gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC) \
-                             .filter(Gist.owner_id == request.authuser.user_id)
+            gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PUBLIC) \
+                             .filter(db.Gist.owner_id == request.authuser.user_id)
 
         # MY public+private
         elif c.show_private and c.show_public:
-            gists = gists.filter(or_(Gist.gist_type == Gist.GIST_PUBLIC,
-                                     Gist.gist_type == Gist.GIST_PRIVATE)) \
-                             .filter(Gist.owner_id == request.authuser.user_id)
+            gists = gists.filter(or_(db.Gist.gist_type == db.Gist.GIST_PUBLIC,
+                                     db.Gist.gist_type == db.Gist.GIST_PRIVATE)) \
+                             .filter(db.Gist.owner_id == request.authuser.user_id)
 
         # default show ALL public gists
         if not c.show_public and not c.show_private:
-            gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)
+            gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PUBLIC)
 
         c.gists = gists
         p = safe_int(request.GET.get('page'), 1)
         c.gists_pager = Page(c.gists, page=p, items_per_page=10,
                              **url_params)
-        return render('admin/gists/index.html')
+        return base.render('admin/gists/index.html')
 
     @LoginRequired()
     def create(self):
@@ -113,7 +111,7 @@
         try:
             form_result = gist_form.to_python(dict(request.POST))
             # TODO: multiple files support, from the form
-            filename = form_result['filename'] or Gist.DEFAULT_FILENAME
+            filename = form_result['filename'] or db.Gist.DEFAULT_FILENAME
             nodes = {
                 filename: {
                     'content': form_result['content'],
@@ -121,7 +119,7 @@
                 }
             }
             _public = form_result['public']
-            gist_type = Gist.GIST_PUBLIC if _public else Gist.GIST_PRIVATE
+            gist_type = db.Gist.GIST_PUBLIC if _public else db.Gist.GIST_PRIVATE
             gist = GistModel().create(
                 description=form_result['description'],
                 owner=request.authuser.user_id,
@@ -130,13 +128,13 @@
                 gist_type=gist_type,
                 lifetime=form_result['lifetime']
             )
-            Session().commit()
+            meta.Session().commit()
             new_gist_id = gist.gist_access_id
         except formencode.Invalid as errors:
             defaults = errors.value
 
             return formencode.htmlfill.render(
-                render('admin/gists/new.html'),
+                base.render('admin/gists/new.html'),
                 defaults=defaults,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -145,23 +143,23 @@
 
         except Exception as e:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during gist creation'), category='error')
+            webutils.flash(_('Error occurred during gist creation'), category='error')
             raise HTTPFound(location=url('new_gist'))
         raise HTTPFound(location=url('gist', gist_id=new_gist_id))
 
     @LoginRequired()
     def new(self, format='html'):
         self.__load_defaults()
-        return render('admin/gists/new.html')
+        return base.render('admin/gists/new.html')
 
     @LoginRequired()
     def delete(self, gist_id):
         gist = GistModel().get_gist(gist_id)
         owner = gist.owner_id == request.authuser.user_id
-        if h.HasPermissionAny('hg.admin')() or owner:
+        if auth.HasPermissionAny('hg.admin')() or owner:
             GistModel().delete(gist)
-            Session().commit()
-            h.flash(_('Deleted gist %s') % gist.gist_access_id, category='success')
+            meta.Session().commit()
+            webutils.flash(_('Deleted gist %s') % gist.gist_access_id, category='success')
         else:
             raise HTTPForbidden()
 
@@ -169,7 +167,7 @@
 
     @LoginRequired(allow_default_user=True)
     def show(self, gist_id, revision='tip', format='html', f_path=None):
-        c.gist = Gist.get_or_404(gist_id)
+        c.gist = db.Gist.get_or_404(gist_id)
 
         if c.gist.is_expired:
             log.error('Gist expired at %s',
@@ -188,11 +186,11 @@
             )
             response.content_type = 'text/plain'
             return content
-        return render('admin/gists/show.html')
+        return base.render('admin/gists/show.html')
 
     @LoginRequired()
     def edit(self, gist_id, format='html'):
-        c.gist = Gist.get_or_404(gist_id)
+        c.gist = db.Gist.get_or_404(gist_id)
 
         if c.gist.is_expired:
             log.error('Gist expired at %s',
@@ -205,7 +203,7 @@
             raise HTTPNotFound()
 
         self.__load_defaults(extra_values=('0', _('Unmodified')))
-        rendered = render('admin/gists/edit.html')
+        rendered = base.render('admin/gists/edit.html')
 
         if request.POST:
             rpost = request.POST
@@ -233,16 +231,16 @@
                     lifetime=rpost['lifetime']
                 )
 
-                Session().commit()
-                h.flash(_('Successfully updated gist content'), category='success')
+                meta.Session().commit()
+                webutils.flash(_('Successfully updated gist content'), category='success')
             except NodeNotChangedError:
                 # raised if nothing was changed in repo itself. We anyway then
                 # store only DB stuff for gist
-                Session().commit()
-                h.flash(_('Successfully updated gist data'), category='success')
+                meta.Session().commit()
+                webutils.flash(_('Successfully updated gist data'), category='success')
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during update of gist %s') % gist_id,
+                webutils.flash(_('Error occurred during update of gist %s') % gist_id,
                         category='error')
 
             raise HTTPFound(location=url('gist', gist_id=gist_id))
@@ -250,9 +248,9 @@
         return rendered
 
     @LoginRequired()
-    @jsonify
+    @base.jsonify
     def check_revision(self, gist_id):
-        c.gist = Gist.get_or_404(gist_id)
+        c.gist = db.Gist.get_or_404(gist_id)
         last_rev = c.gist.scm_instance.get_changeset()
         success = True
         revision = request.POST.get('revision')
--- a/kallithea/controllers/admin/my_account.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/my_account.py	Thu May 27 21:27:37 2021 +0200
@@ -35,16 +35,14 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
-from kallithea.lib import auth_modules
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import auth_modules, webutils
 from kallithea.lib.auth import AuthUser, LoginRequired
-from kallithea.lib.base import BaseController, IfSshEnabled, render
 from kallithea.lib.utils2 import generate_api_key, safe_int
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.api_key import ApiKeyModel
-from kallithea.model.db import Repository, User, UserEmailMap, UserFollowing
 from kallithea.model.forms import PasswordChangeForm, UserForm
-from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException
 from kallithea.model.user import UserModel
@@ -53,35 +51,30 @@
 log = logging.getLogger(__name__)
 
 
-class MyAccountController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
-    # To properly map this controller, ensure your config/routing.py
-    # file has a resource setup:
-    #     map.resource('setting', 'settings', controller='admin/settings',
-    #         path_prefix='/admin', name_prefix='admin_')
+class MyAccountController(base.BaseController):
 
     @LoginRequired()
     def _before(self, *args, **kwargs):
         super(MyAccountController, self)._before(*args, **kwargs)
 
     def __load_data(self):
-        c.user = User.get(request.authuser.user_id)
+        c.user = db.User.get(request.authuser.user_id)
         if c.user.is_default_user:
-            h.flash(_("You can't edit this user since it's"
+            webutils.flash(_("You can't edit this user since it's"
                       " crucial for entire application"), category='warning')
             raise HTTPFound(location=url('users'))
 
     def _load_my_repos_data(self, watched=False):
         if watched:
             admin = False
-            repos_list = Session().query(Repository) \
-                         .join(UserFollowing) \
-                         .filter(UserFollowing.user_id ==
+            repos_list = meta.Session().query(db.Repository) \
+                         .join(db.UserFollowing) \
+                         .filter(db.UserFollowing.user_id ==
                                  request.authuser.user_id).all()
         else:
             admin = True
-            repos_list = Session().query(Repository) \
-                         .filter(Repository.owner_id ==
+            repos_list = meta.Session().query(db.Repository) \
+                         .filter(db.Repository.owner_id ==
                                  request.authuser.user_id).all()
 
         return RepoModel().get_repos_as_dict(repos_list, admin=admin)
@@ -91,7 +84,7 @@
         self.__load_data()
         c.perm_user = AuthUser(user_id=request.authuser.user_id)
         managed_fields = auth_modules.get_managed_fields(c.user)
-        def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global']
+        def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions
         if 'hg.register.none' in def_user_perms:
             managed_fields.extend(['username', 'firstname', 'lastname', 'email'])
 
@@ -116,14 +109,14 @@
 
                 UserModel().update(request.authuser.user_id, form_result,
                                    skip_attrs=skip_attrs)
-                h.flash(_('Your account was updated successfully'),
+                webutils.flash(_('Your account was updated successfully'),
                         category='success')
-                Session().commit()
+                meta.Session().commit()
                 update = True
 
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('admin/my_account/my_account.html'),
+                    base.render('admin/my_account/my_account.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -131,12 +124,12 @@
                     force_defaults=False)
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during update of user %s')
+                webutils.flash(_('Error occurred during update of user %s')
                         % form_result.get('username'), category='error')
         if update:
             raise HTTPFound(location='my_account')
         return htmlfill.render(
-            render('admin/my_account/my_account.html'),
+            base.render('admin/my_account/my_account.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -153,11 +146,11 @@
             try:
                 form_result = _form.to_python(request.POST)
                 UserModel().update(request.authuser.user_id, form_result)
-                Session().commit()
-                h.flash(_("Successfully updated password"), category='success')
+                meta.Session().commit()
+                webutils.flash(_("Successfully updated password"), category='success')
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('admin/my_account/my_account.html'),
+                    base.render('admin/my_account/my_account.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -165,9 +158,9 @@
                     force_defaults=False)
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during update of user password'),
+                webutils.flash(_('Error occurred during update of user password'),
                         category='error')
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_repos(self):
         c.active = 'repos'
@@ -175,7 +168,7 @@
 
         # data used to render the grid
         c.data = self._load_my_repos_data()
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_watched(self):
         c.active = 'watched'
@@ -183,36 +176,36 @@
 
         # data used to render the grid
         c.data = self._load_my_repos_data(watched=True)
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_perms(self):
         c.active = 'perms'
         self.__load_data()
         c.perm_user = AuthUser(user_id=request.authuser.user_id)
 
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_emails(self):
         c.active = 'emails'
         self.__load_data()
 
-        c.user_email_map = UserEmailMap.query() \
-            .filter(UserEmailMap.user == c.user).all()
-        return render('admin/my_account/my_account.html')
+        c.user_email_map = db.UserEmailMap.query() \
+            .filter(db.UserEmailMap.user == c.user).all()
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_emails_add(self):
         email = request.POST.get('new_email')
 
         try:
             UserModel().add_extra_email(request.authuser.user_id, email)
-            Session().commit()
-            h.flash(_("Added email %s to user") % email, category='success')
+            meta.Session().commit()
+            webutils.flash(_("Added email %s to user") % email, category='success')
         except formencode.Invalid as error:
             msg = error.error_dict['email']
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during email saving'),
+            webutils.flash(_('An error occurred during email saving'),
                     category='error')
         raise HTTPFound(location=url('my_account_emails'))
 
@@ -220,8 +213,8 @@
         email_id = request.POST.get('del_email_id')
         user_model = UserModel()
         user_model.delete_extra_email(request.authuser.user_id, email_id)
-        Session().commit()
-        h.flash(_("Removed email from user"), category='success')
+        meta.Session().commit()
+        webutils.flash(_("Removed email from user"), category='success')
         raise HTTPFound(location=url('my_account_emails'))
 
     def my_account_api_keys(self):
@@ -238,59 +231,59 @@
         c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
         c.user_api_keys = ApiKeyModel().get_api_keys(request.authuser.user_id,
                                                      show_expired=show_expired)
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
     def my_account_api_keys_add(self):
         lifetime = safe_int(request.POST.get('lifetime'), -1)
         description = request.POST.get('description')
         ApiKeyModel().create(request.authuser.user_id, description, lifetime)
-        Session().commit()
-        h.flash(_("API key successfully created"), category='success')
+        meta.Session().commit()
+        webutils.flash(_("API key successfully created"), category='success')
         raise HTTPFound(location=url('my_account_api_keys'))
 
     def my_account_api_keys_delete(self):
         api_key = request.POST.get('del_api_key')
         if request.POST.get('del_api_key_builtin'):
-            user = User.get(request.authuser.user_id)
+            user = db.User.get(request.authuser.user_id)
             user.api_key = generate_api_key()
-            Session().commit()
-            h.flash(_("API key successfully reset"), category='success')
+            meta.Session().commit()
+            webutils.flash(_("API key successfully reset"), category='success')
         elif api_key:
             ApiKeyModel().delete(api_key, request.authuser.user_id)
-            Session().commit()
-            h.flash(_("API key successfully deleted"), category='success')
+            meta.Session().commit()
+            webutils.flash(_("API key successfully deleted"), category='success')
 
         raise HTTPFound(location=url('my_account_api_keys'))
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def my_account_ssh_keys(self):
         c.active = 'ssh_keys'
         self.__load_data()
         c.user_ssh_keys = SshKeyModel().get_ssh_keys(request.authuser.user_id)
-        return render('admin/my_account/my_account.html')
+        return base.render('admin/my_account/my_account.html')
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def my_account_ssh_keys_add(self):
         description = request.POST.get('description')
         public_key = request.POST.get('public_key')
         try:
             new_ssh_key = SshKeyModel().create(request.authuser.user_id,
                                                description, public_key)
-            Session().commit()
+            meta.Session().commit()
             SshKeyModel().write_authorized_keys()
-            h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
+            webutils.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
         except SshKeyModelException as e:
-            h.flash(e.args[0], category='error')
+            webutils.flash(e.args[0], category='error')
         raise HTTPFound(location=url('my_account_ssh_keys'))
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def my_account_ssh_keys_delete(self):
         fingerprint = request.POST.get('del_public_key_fingerprint')
         try:
             SshKeyModel().delete(fingerprint, request.authuser.user_id)
-            Session().commit()
+            meta.Session().commit()
             SshKeyModel().write_authorized_keys()
-            h.flash(_("SSH key successfully deleted"), category='success')
+            webutils.flash(_("SSH key successfully deleted"), category='success')
         except SshKeyModelException as e:
-            h.flash(e.args[0], category='error')
+            webutils.flash(e.args[0], category='error')
         raise HTTPFound(location=url('my_account_ssh_keys'))
--- a/kallithea/controllers/admin/permissions.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/permissions.py	Thu May 27 21:27:37 2021 +0200
@@ -36,24 +36,19 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
-from kallithea.model.db import User, UserIpMap
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import DefaultPermissionsForm
-from kallithea.model.meta import Session
 from kallithea.model.permission import PermissionModel
 
 
 log = logging.getLogger(__name__)
 
 
-class PermissionsController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
-    # To properly map this controller, ensure your config/routing.py
-    # file has a resource setup:
-    #     map.resource('permission', 'permissions')
+class PermissionsController(base.BaseController):
 
     @LoginRequired()
     @HasPermissionAnyDecorator('hg.admin')
@@ -61,18 +56,22 @@
         super(PermissionsController, self)._before(*args, **kwargs)
 
     def __load_data(self):
+        # Permissions for the Default user on new repositories
         c.repo_perms_choices = [('repository.none', _('None'),),
                                    ('repository.read', _('Read'),),
                                    ('repository.write', _('Write'),),
                                    ('repository.admin', _('Admin'),)]
+        # Permissions for the Default user on new repository groups
         c.group_perms_choices = [('group.none', _('None'),),
                                  ('group.read', _('Read'),),
                                  ('group.write', _('Write'),),
                                  ('group.admin', _('Admin'),)]
+        # Permissions for the Default user on new user groups
         c.user_group_perms_choices = [('usergroup.none', _('None'),),
                                       ('usergroup.read', _('Read'),),
                                       ('usergroup.write', _('Write'),),
                                       ('usergroup.admin', _('Admin'),)]
+        # Registration - allow new Users to create an account
         c.register_choices = [
             ('hg.register.none',
                 _('Disabled')),
@@ -80,26 +79,18 @@
                 _('Allowed with manual account activation')),
             ('hg.register.auto_activate',
                 _('Allowed with automatic account activation')), ]
-
+        # External auth account activation
         c.extern_activate_choices = [
             ('hg.extern_activate.manual', _('Manual activation of external account')),
             ('hg.extern_activate.auto', _('Automatic activation of external account')),
         ]
-
+        # Top level repository creation
         c.repo_create_choices = [('hg.create.none', _('Disabled')),
                                  ('hg.create.repository', _('Enabled'))]
-
-        c.repo_create_on_write_choices = [
-            ('hg.create.write_on_repogroup.true', _('Enabled')),
-            ('hg.create.write_on_repogroup.false', _('Disabled')),
-        ]
-
+        # User group creation
         c.user_group_create_choices = [('hg.usergroup.create.false', _('Disabled')),
                                        ('hg.usergroup.create.true', _('Enabled'))]
-
-        c.repo_group_create_choices = [('hg.repogroup.create.false', _('Disabled')),
-                                       ('hg.repogroup.create.true', _('Enabled'))]
-
+        # Repository forking:
         c.fork_choices = [('hg.fork.none', _('Disabled')),
                           ('hg.fork.repository', _('Enabled'))]
 
@@ -112,8 +103,6 @@
                 [x[0] for x in c.group_perms_choices],
                 [x[0] for x in c.user_group_perms_choices],
                 [x[0] for x in c.repo_create_choices],
-                [x[0] for x in c.repo_create_on_write_choices],
-                [x[0] for x in c.repo_group_create_choices],
                 [x[0] for x in c.user_group_create_choices],
                 [x[0] for x in c.fork_choices],
                 [x[0] for x in c.register_choices],
@@ -123,15 +112,15 @@
                 form_result = _form.to_python(dict(request.POST))
                 form_result.update({'perm_user_name': 'default'})
                 PermissionModel().update(form_result)
-                Session().commit()
-                h.flash(_('Global permissions updated successfully'),
+                meta.Session().commit()
+                webutils.flash(_('Global permissions updated successfully'),
                         category='success')
 
             except formencode.Invalid as errors:
                 defaults = errors.value
 
                 return htmlfill.render(
-                    render('admin/permissions/permissions.html'),
+                    base.render('admin/permissions/permissions.html'),
                     defaults=defaults,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -139,12 +128,12 @@
                     force_defaults=False)
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during update of permissions'),
+                webutils.flash(_('Error occurred during update of permissions'),
                         category='error')
 
             raise HTTPFound(location=url('admin_permissions'))
 
-        c.user = User.get_default_user()
+        c.user = db.User.get_default_user()
         defaults = {'anonymous': c.user.active}
 
         for p in c.user.user_perms:
@@ -157,15 +146,9 @@
             if p.permission.permission_name.startswith('usergroup.'):
                 defaults['default_user_group_perm'] = p.permission.permission_name
 
-            if p.permission.permission_name.startswith('hg.create.write_on_repogroup.'):
-                defaults['create_on_write'] = p.permission.permission_name
-
             elif p.permission.permission_name.startswith('hg.create.'):
                 defaults['default_repo_create'] = p.permission.permission_name
 
-            if p.permission.permission_name.startswith('hg.repogroup.'):
-                defaults['default_repo_group_create'] = p.permission.permission_name
-
             if p.permission.permission_name.startswith('hg.usergroup.'):
                 defaults['default_user_group_create'] = p.permission.permission_name
 
@@ -179,21 +162,21 @@
                 defaults['default_fork'] = p.permission.permission_name
 
         return htmlfill.render(
-            render('admin/permissions/permissions.html'),
+            base.render('admin/permissions/permissions.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
 
     def permission_ips(self):
         c.active = 'ips'
-        c.user = User.get_default_user()
-        c.user_ip_map = UserIpMap.query() \
-                        .filter(UserIpMap.user == c.user).all()
+        c.user = db.User.get_default_user()
+        c.user_ip_map = db.UserIpMap.query() \
+                        .filter(db.UserIpMap.user == c.user).all()
 
-        return render('admin/permissions/permissions.html')
+        return base.render('admin/permissions/permissions.html')
 
     def permission_perms(self):
         c.active = 'perms'
-        c.user = User.get_default_user()
+        c.user = db.User.get_default_user()
         c.perm_user = AuthUser(dbuser=c.user)
-        return render('admin/permissions/permissions.html')
+        return base.render('admin/permissions/permissions.html')
--- a/kallithea/controllers/admin/repo_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/repo_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -36,14 +36,13 @@
 from tg.i18n import ungettext
 from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
 from kallithea.lib.utils2 import safe_int
-from kallithea.model.db import RepoGroup, Repository
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm
-from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.scm import AvailableRepoGroupChoices, RepoGroupList
@@ -52,7 +51,7 @@
 log = logging.getLogger(__name__)
 
 
-class RepoGroupsController(BaseController):
+class RepoGroupsController(base.BaseController):
 
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
@@ -63,7 +62,7 @@
         exclude is used for not moving group to itself TODO: also exclude descendants
         Note: only admin can create top level groups
         """
-        repo_groups = AvailableRepoGroupChoices([], 'admin', extras)
+        repo_groups = AvailableRepoGroupChoices('admin', extras)
         exclude_group_ids = set(rg.group_id for rg in exclude)
         c.repo_groups = [rg for rg in repo_groups
                          if rg[0] not in exclude_group_ids]
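
The __load_defaults docstring above notes that only admins may create top-level groups; this changeset also simplifies the AvailableRepoGroupChoices call so the caller no longer passes a separate list of top-level permission names. A small before/after sketch of the call site, with the signature inferred from this hunk and from the matching change in repos.py further below (the helper itself, in kallithea.model.scm, is not shown in this excerpt):

    # before: top-level permission names were passed alongside the permission level
    repo_groups = AvailableRepoGroupChoices([], 'admin', extras)

    # after: only the required permission level and the extra groups to keep selectable
    repo_groups = AvailableRepoGroupChoices('admin', extras)
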
@@ -74,7 +73,7 @@
 
         :param group_id:
         """
-        repo_group = RepoGroup.get_or_404(group_id)
+        repo_group = db.RepoGroup.get_or_404(group_id)
         data = repo_group.get_dict()
         data['group_name'] = repo_group.name
 
@@ -98,7 +97,7 @@
         return False
 
     def index(self, format='html'):
-        _list = RepoGroup.query(sorted=True).all()
+        _list = db.RepoGroup.query(sorted=True).all()
         group_iter = RepoGroupList(_list, perm_level='admin')
         repo_groups_data = []
         _tmpl_lookup = app_globals.mako_lookup
@@ -106,22 +105,22 @@
 
         def repo_group_name(repo_group_name, children_groups):
             return template.get_def("repo_group_name") \
-                .render_unicode(repo_group_name, children_groups, _=_, h=h, c=c)
+                .render_unicode(repo_group_name, children_groups, _=_, webutils=webutils, c=c)
 
         def repo_group_actions(repo_group_id, repo_group_name, gr_count):
             return template.get_def("repo_group_actions") \
-                .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
+                .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, webutils=webutils, c=c,
                         ungettext=ungettext)
 
         for repo_gr in group_iter:
             children_groups = [g.name for g in repo_gr.parents] + [repo_gr.name]
             repo_count = repo_gr.repositories.count()
             repo_groups_data.append({
-                "raw_name": h.escape(repo_gr.group_name),
+                "raw_name": webutils.escape(repo_gr.group_name),
                 "group_name": repo_group_name(repo_gr.group_name, children_groups),
-                "desc": h.escape(repo_gr.group_description),
+                "desc": webutils.escape(repo_gr.group_description),
                 "repos": repo_count,
-                "owner": h.person(repo_gr.owner),
+                "owner": repo_gr.owner.username,
                 "action": repo_group_actions(repo_gr.group_id, repo_gr.group_name,
                                              repo_count)
             })
@@ -132,7 +131,7 @@
             "records": repo_groups_data
         }
 
-        return render('admin/repo_groups/repo_groups.html')
+        return base.render('admin/repo_groups/repo_groups.html')
 
     def create(self):
         self.__load_defaults()
@@ -150,11 +149,11 @@
                 owner=request.authuser.user_id, # TODO: make editable
                 copy_permissions=form_result['group_copy_permissions']
             )
-            Session().commit()
+            meta.Session().commit()
             # TODO: in future action_logger(, '', '', '')
         except formencode.Invalid as errors:
             return htmlfill.render(
-                render('admin/repo_groups/repo_group_add.html'),
+                base.render('admin/repo_groups/repo_group_add.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -162,14 +161,14 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during creation of repository group %s')
+            webutils.flash(_('Error occurred during creation of repository group %s')
                     % request.POST.get('group_name'), category='error')
             if form_result is None:
                 raise
             parent_group_id = form_result['parent_group_id']
             # TODO: maybe we should get back to the main view, not the admin one
             raise HTTPFound(location=url('repos_groups', parent_group=parent_group_id))
-        h.flash(_('Created repository group %s') % gr.group_name,
+        webutils.flash(_('Created repository group %s') % gr.group_name,
                 category='success')
         raise HTTPFound(location=url('repos_group_home', group_name=gr.group_name))
 
@@ -181,7 +180,7 @@
         else:
             # we pass in parent group into creation form, thus we know
             # what would be the group, we can check perms here !
-            group = RepoGroup.get(parent_group_id) if parent_group_id else None
+            group = db.RepoGroup.get(parent_group_id) if parent_group_id else None
             group_name = group.group_name if group else None
             if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'):
                 pass
@@ -190,7 +189,7 @@
 
         self.__load_defaults()
         return htmlfill.render(
-            render('admin/repo_groups/repo_group_add.html'),
+            base.render('admin/repo_groups/repo_group_add.html'),
             defaults={'parent_group_id': parent_group_id},
             errors={},
             prefix_error=False,
@@ -199,7 +198,7 @@
 
     @HasRepoGroupPermissionLevelDecorator('admin')
     def update(self, group_name):
-        c.repo_group = RepoGroup.guess_instance(group_name)
+        c.repo_group = db.RepoGroup.guess_instance(group_name)
         self.__load_defaults(extras=[c.repo_group.parent_group],
                              exclude=[c.repo_group])
 
@@ -221,8 +220,8 @@
             form_result = repo_group_form.to_python(dict(request.POST))
 
             new_gr = RepoGroupModel().update(group_name, form_result)
-            Session().commit()
-            h.flash(_('Updated repository group %s')
+            meta.Session().commit()
+            webutils.flash(_('Updated repository group %s')
                     % form_result['group_name'], category='success')
             # we now have new name !
             group_name = new_gr.group_name
@@ -230,7 +229,7 @@
         except formencode.Invalid as errors:
             c.active = 'settings'
             return htmlfill.render(
-                render('admin/repo_groups/repo_group_edit.html'),
+                base.render('admin/repo_groups/repo_group_edit.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -238,35 +237,35 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during update of repository group %s')
+            webutils.flash(_('Error occurred during update of repository group %s')
                     % request.POST.get('group_name'), category='error')
 
         raise HTTPFound(location=url('edit_repo_group', group_name=group_name))
 
     @HasRepoGroupPermissionLevelDecorator('admin')
     def delete(self, group_name):
-        gr = c.repo_group = RepoGroup.guess_instance(group_name)
+        gr = c.repo_group = db.RepoGroup.guess_instance(group_name)
         repos = gr.repositories.all()
         if repos:
-            h.flash(_('This group contains %s repositories and cannot be '
+            webutils.flash(_('This group contains %s repositories and cannot be '
                       'deleted') % len(repos), category='warning')
             raise HTTPFound(location=url('repos_groups'))
 
         children = gr.children.all()
         if children:
-            h.flash(_('This group contains %s subgroups and cannot be deleted'
+            webutils.flash(_('This group contains %s subgroups and cannot be deleted'
                       % (len(children))), category='warning')
             raise HTTPFound(location=url('repos_groups'))
 
         try:
             RepoGroupModel().delete(group_name)
-            Session().commit()
-            h.flash(_('Removed repository group %s') % group_name,
+            meta.Session().commit()
+            webutils.flash(_('Removed repository group %s') % group_name,
                     category='success')
             # TODO: in future action_logger(, '', '', '')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during deletion of repository group %s')
+            webutils.flash(_('Error occurred during deletion of repository group %s')
                     % group_name, category='error')
 
         if gr.parent_group:
@@ -279,7 +278,7 @@
         the group by id view instead
         """
         group_name = group_name.rstrip('/')
-        id_ = RepoGroup.get_by_group_name(group_name)
+        id_ = db.RepoGroup.get_by_group_name(group_name)
         if id_:
             return self.show(group_name)
         raise HTTPNotFound
@@ -288,29 +287,29 @@
     def show(self, group_name):
         c.active = 'settings'
 
-        c.group = c.repo_group = RepoGroup.guess_instance(group_name)
+        c.group = c.repo_group = db.RepoGroup.guess_instance(group_name)
 
-        groups = RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all()
+        groups = db.RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all()
         repo_groups_list = self.scm_model.get_repo_groups(groups)
 
-        repos_list = Repository.query(sorted=True).filter_by(group=c.group).all()
+        repos_list = db.Repository.query(sorted=True).filter_by(group=c.group).all()
         c.data = RepoModel().get_repos_as_dict(repos_list,
                                                repo_groups_list=repo_groups_list,
                                                short_name=True)
 
-        return render('admin/repo_groups/repo_group_show.html')
+        return base.render('admin/repo_groups/repo_group_show.html')
 
     @HasRepoGroupPermissionLevelDecorator('admin')
     def edit(self, group_name):
         c.active = 'settings'
 
-        c.repo_group = RepoGroup.guess_instance(group_name)
+        c.repo_group = db.RepoGroup.guess_instance(group_name)
         self.__load_defaults(extras=[c.repo_group.parent_group],
                              exclude=[c.repo_group])
         defaults = self.__load_data(c.repo_group.group_id)
 
         return htmlfill.render(
-            render('admin/repo_groups/repo_group_edit.html'),
+            base.render('admin/repo_groups/repo_group_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -319,19 +318,19 @@
     @HasRepoGroupPermissionLevelDecorator('admin')
     def edit_repo_group_advanced(self, group_name):
         c.active = 'advanced'
-        c.repo_group = RepoGroup.guess_instance(group_name)
+        c.repo_group = db.RepoGroup.guess_instance(group_name)
 
-        return render('admin/repo_groups/repo_group_edit.html')
+        return base.render('admin/repo_groups/repo_group_edit.html')
 
     @HasRepoGroupPermissionLevelDecorator('admin')
     def edit_repo_group_perms(self, group_name):
         c.active = 'perms'
-        c.repo_group = RepoGroup.guess_instance(group_name)
+        c.repo_group = db.RepoGroup.guess_instance(group_name)
         self.__load_defaults()
         defaults = self.__load_data(c.repo_group.group_id)
 
         return htmlfill.render(
-            render('admin/repo_groups/repo_group_edit.html'),
+            base.render('admin/repo_groups/repo_group_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -345,13 +344,13 @@
         :param group_name:
         """
 
-        c.repo_group = RepoGroup.guess_instance(group_name)
+        c.repo_group = db.RepoGroup.guess_instance(group_name)
         valid_recursive_choices = ['none', 'repos', 'groups', 'all']
         form_result = RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST)
         if not request.authuser.is_admin:
             if self._revoke_perms_on_yourself(form_result):
                 msg = _('Cannot revoke permission for yourself as admin')
-                h.flash(msg, category='warning')
+                webutils.flash(msg, category='warning')
                 raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
         recursive = form_result['recursive']
         # iterate over all members(if in recursive mode) of this groups and
@@ -364,8 +363,8 @@
         # TODO: implement this
         #action_logger(request.authuser, 'admin_changed_repo_permissions',
         #              repo_name, request.ip_addr)
-        Session().commit()
-        h.flash(_('Repository group permissions updated'), category='success')
+        meta.Session().commit()
+        webutils.flash(_('Repository group permissions updated'), category='success')
         raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
 
     @HasRepoGroupPermissionLevelDecorator('admin')
@@ -381,7 +380,7 @@
             if not request.authuser.is_admin:
                 if obj_type == 'user' and request.authuser.user_id == obj_id:
                     msg = _('Cannot revoke permission for yourself as admin')
-                    h.flash(msg, category='warning')
+                    webutils.flash(msg, category='warning')
                     raise Exception('revoke admin permission on self')
             recursive = request.POST.get('recursive', 'none')
             if obj_type == 'user':
@@ -394,9 +393,9 @@
                                                    obj_type='user_group',
                                                    recursive=recursive)
 
-            Session().commit()
+            meta.Session().commit()
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during revoking of permission'),
+            webutils.flash(_('An error occurred during revoking of permission'),
                     category='error')
             raise HTTPInternalServerError()
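
For readers less familiar with the modules referenced in the hunks above, here is a minimal, hypothetical sketch of the flash-and-redirect idiom they rely on (kallithea.lib.webutils for flash/url, kallithea.model.meta for the session). The controller and action names are illustrative only, and the snippet assumes a running Kallithea/TurboGears request context rather than being runnable standalone:

    from tg.i18n import ugettext as _
    from webob.exc import HTTPFound

    from kallithea.controllers import base
    from kallithea.lib import webutils
    from kallithea.lib.webutils import url
    from kallithea.model import meta


    class ExampleGroupController(base.BaseController):

        def update_group_perms(self, group_name):
            # ... permission updates would be applied here ...
            meta.Session().commit()
            webutils.flash(_('Repository group permissions updated'),
                           category='success')
            raise HTTPFound(location=url('edit_repo_group_perms',
                                         group_name=group_name))
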
--- a/kallithea/controllers/admin/repos.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/repos.py	Thu May 27 21:27:37 2021 +0200
@@ -28,7 +28,6 @@
 import logging
 import traceback
 
-import celery.result
 import formencode
 from formencode import htmlfill
 from tg import request
@@ -37,17 +36,15 @@
 from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
 
 import kallithea
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
-from kallithea.lib.auth import HasPermissionAny, HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous
-from kallithea.lib.base import BaseRepoController, jsonify, render
+from kallithea.controllers import base
+from kallithea.lib import webutils
+from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous
 from kallithea.lib.exceptions import AttachedForksError
-from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import safe_int
 from kallithea.lib.vcs import RepositoryError
-from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, UserFollowing
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta, userlog
 from kallithea.model.forms import RepoFieldForm, RepoForm, RepoPermsForm
-from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import AvailableRepoGroupChoices, RepoList, ScmModel
 
@@ -55,12 +52,7 @@
 log = logging.getLogger(__name__)
 
 
-class ReposController(BaseRepoController):
-    """
-    REST Controller styled on the Atom Publishing Protocol"""
-    # To properly map this controller, ensure your config/routing.py
-    # file has a resource setup:
-    #     map.resource('repo', 'repos')
+class ReposController(base.BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
@@ -70,20 +62,14 @@
         repo_obj = c.db_repo
 
         if repo_obj is None:
-            h.not_mapped_error(c.repo_name)
-            raise HTTPFound(location=url('repos'))
+            raise HTTPNotFound()
 
         return repo_obj
 
     def __load_defaults(self, repo=None):
-        top_perms = ['hg.create.repository']
-        if HasPermissionAny('hg.create.write_on_repogroup.true')():
-            repo_group_perm_level = 'write'
-        else:
-            repo_group_perm_level = 'admin'
         extras = [] if repo is None else [repo.group]
 
-        c.repo_groups = AvailableRepoGroupChoices(top_perms, repo_group_perm_level, extras)
+        c.repo_groups = AvailableRepoGroupChoices('write', extras)
 
         c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo)
 
@@ -101,13 +87,13 @@
         return defaults
 
     def index(self, format='html'):
-        repos_list = RepoList(Repository.query(sorted=True).all(), perm_level='admin')
+        repos_list = RepoList(db.Repository.query(sorted=True).all(), perm_level='admin')
         # the repo list will be filtered to only show repos where the user has read permissions
         repos_data = RepoModel().get_repos_as_dict(repos_list, admin=True)
         # data used to render the grid
         c.data = repos_data
 
-        return render('admin/repos/repos.html')
+        return base.render('admin/repos/repos.html')
 
     @NotAnonymous()
     def create(self):
@@ -120,7 +106,7 @@
         except formencode.Invalid as errors:
             log.info(errors)
             return htmlfill.render(
-                render('admin/repos/repo_add.html'),
+                base.render('admin/repos/repo_add.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -130,18 +116,17 @@
         try:
             # create is done sometimes async on celery, db transaction
             # management is handled there.
-            task = RepoModel().create(form_result, request.authuser.user_id)
-            task_id = task.task_id
+            RepoModel().create(form_result, request.authuser.user_id)
         except Exception:
             log.error(traceback.format_exc())
             msg = (_('Error creating repository %s')
                    % form_result.get('repo_name'))
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPFound(location=url('home'))
 
-        raise HTTPFound(location=h.url('repo_creating_home',
+        raise HTTPFound(location=webutils.url('repo_creating_home',
                               repo_name=form_result['repo_name_full'],
-                              task_id=task_id))
+                              ))
 
     @NotAnonymous()
     def create_repository(self):
@@ -151,9 +136,9 @@
         parent_group = request.GET.get('parent_group')
 
         ## apply the defaults from defaults page
-        defaults = Setting.get_default_repo_settings(strip_prefix=True)
+        defaults = db.Setting.get_default_repo_settings(strip_prefix=True)
         if parent_group:
-            prg = RepoGroup.get(parent_group)
+            prg = db.RepoGroup.get(parent_group)
             if prg is None or not any(rgc[0] == prg.group_id
                                       for rgc in c.repo_groups):
                 raise HTTPForbidden
@@ -162,7 +147,7 @@
         defaults.update({'repo_group': parent_group})
 
         return htmlfill.render(
-            render('admin/repos/repo_add.html'),
+            base.render('admin/repos/repo_add.html'),
             defaults=defaults,
             errors={},
             prefix_error=False,
@@ -172,39 +157,30 @@
     @LoginRequired()
     def repo_creating(self, repo_name):
         c.repo = repo_name
-        c.task_id = request.GET.get('task_id')
         if not c.repo:
             raise HTTPNotFound()
-        return render('admin/repos/repo_creating.html')
+        return base.render('admin/repos/repo_creating.html')
 
     @LoginRequired()
-    @jsonify
+    @base.jsonify
     def repo_check(self, repo_name):
         c.repo = repo_name
-        task_id = request.GET.get('task_id')
-
-        if task_id and task_id not in ['None']:
-            if kallithea.CELERY_APP:
-                task_result = celery.result.AsyncResult(task_id, app=kallithea.CELERY_APP)
-                if task_result.failed():
-                    raise HTTPInternalServerError(task_result.traceback)
-
-        repo = Repository.get_by_repo_name(repo_name)
-        if repo and repo.repo_state == Repository.STATE_CREATED:
+        repo = db.Repository.get_by_repo_name(repo_name)
+        if repo and repo.repo_state == db.Repository.STATE_CREATED:
             if repo.clone_uri:
-                h.flash(_('Created repository %s from %s')
+                webutils.flash(_('Created repository %s from %s')
                         % (repo.repo_name, repo.clone_uri_hidden), category='success')
             else:
-                repo_url = h.link_to(repo.repo_name,
-                                     h.url('summary_home',
+                repo_url = webutils.link_to(repo.repo_name,
+                                     webutils.url('summary_home',
                                            repo_name=repo.repo_name))
                 fork = repo.fork
                 if fork is not None:
                     fork_name = fork.repo_name
-                    h.flash(h.HTML(_('Forked repository %s as %s'))
+                    webutils.flash(webutils.HTML(_('Forked repository %s as %s'))
                             % (fork_name, repo_url), category='success')
                 else:
-                    h.flash(h.HTML(_('Created repository %s')) % repo_url,
+                    webutils.flash(webutils.HTML(_('Created repository %s')) % repo_url,
                             category='success')
             return {'result': True}
         return {'result': False}
@@ -214,12 +190,12 @@
         c.repo_info = self._load_repo()
         self.__load_defaults(c.repo_info)
         c.active = 'settings'
-        c.repo_fields = RepositoryField.query() \
-            .filter(RepositoryField.repository == c.repo_info).all()
+        c.repo_fields = db.RepositoryField.query() \
+            .filter(db.RepositoryField.repository == c.repo_info).all()
 
         repo_model = RepoModel()
         changed_name = repo_name
-        repo = Repository.get_by_repo_name(repo_name)
+        repo = db.Repository.get_by_repo_name(repo_name)
         old_data = {
             'repo_name': repo_name,
             'repo_group': repo.group.get_dict() if repo.group else {},
@@ -233,18 +209,18 @@
             form_result = _form.to_python(dict(request.POST))
             repo = repo_model.update(repo_name, **form_result)
             ScmModel().mark_for_invalidation(repo_name)
-            h.flash(_('Repository %s updated successfully') % repo_name,
+            webutils.flash(_('Repository %s updated successfully') % repo_name,
                     category='success')
             changed_name = repo.repo_name
-            action_logger(request.authuser, 'admin_updated_repo',
+            userlog.action_logger(request.authuser, 'admin_updated_repo',
                 changed_name, request.ip_addr)
-            Session().commit()
+            meta.Session().commit()
         except formencode.Invalid as errors:
             log.info(errors)
             defaults = self.__load_data()
             defaults.update(errors.value)
             return htmlfill.render(
-                render('admin/repos/repo_edit.html'),
+                base.render('admin/repos/repo_edit.html'),
                 defaults=defaults,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -253,7 +229,7 @@
 
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during update of repository %s')
+            webutils.flash(_('Error occurred during update of repository %s')
                     % repo_name, category='error')
         raise HTTPFound(location=url('edit_repo', repo_name=changed_name))
 
@@ -262,8 +238,7 @@
         repo_model = RepoModel()
         repo = repo_model.get_by_repo_name(repo_name)
         if not repo:
-            h.not_mapped_error(repo_name)
-            raise HTTPFound(location=url('repos'))
+            raise HTTPNotFound()
         try:
             _forks = repo.forks.count()
             handle_forks = None
@@ -271,23 +246,23 @@
                 do = request.POST['forks']
                 if do == 'detach_forks':
                     handle_forks = 'detach'
-                    h.flash(_('Detached %s forks') % _forks, category='success')
+                    webutils.flash(_('Detached %s forks') % _forks, category='success')
                 elif do == 'delete_forks':
                     handle_forks = 'delete'
-                    h.flash(_('Deleted %s forks') % _forks, category='success')
+                    webutils.flash(_('Deleted %s forks') % _forks, category='success')
             repo_model.delete(repo, forks=handle_forks)
-            action_logger(request.authuser, 'admin_deleted_repo',
+            userlog.action_logger(request.authuser, 'admin_deleted_repo',
                 repo_name, request.ip_addr)
             ScmModel().mark_for_invalidation(repo_name)
-            h.flash(_('Deleted repository %s') % repo_name, category='success')
-            Session().commit()
+            webutils.flash(_('Deleted repository %s') % repo_name, category='success')
+            meta.Session().commit()
         except AttachedForksError:
-            h.flash(_('Cannot delete repository %s which still has forks')
+            webutils.flash(_('Cannot delete repository %s which still has forks')
                         % repo_name, category='warning')
 
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during deletion of %s') % repo_name,
+            webutils.flash(_('An error occurred during deletion of %s') % repo_name,
                     category='error')
 
         if repo.group:
@@ -297,11 +272,11 @@
     @HasRepoPermissionLevelDecorator('admin')
     def edit(self, repo_name):
         defaults = self.__load_data()
-        c.repo_fields = RepositoryField.query() \
-            .filter(RepositoryField.repository == c.repo_info).all()
+        c.repo_fields = db.RepositoryField.query() \
+            .filter(db.RepositoryField.repository == c.repo_info).all()
         c.active = 'settings'
         return htmlfill.render(
-            render('admin/repos/repo_edit.html'),
+            base.render('admin/repos/repo_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -313,7 +288,7 @@
         defaults = RepoModel()._get_defaults(repo_name)
 
         return htmlfill.render(
-            render('admin/repos/repo_edit.html'),
+            base.render('admin/repos/repo_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -326,8 +301,8 @@
         # TODO: implement this
         #action_logger(request.authuser, 'admin_changed_repo_permissions',
         #              repo_name, request.ip_addr)
-        Session().commit()
-        h.flash(_('Repository permissions updated'), category='success')
+        meta.Session().commit()
+        webutils.flash(_('Repository permissions updated'), category='success')
         raise HTTPFound(location=url('edit_repo_perms', repo_name=repo_name))
 
     @HasRepoPermissionLevelDecorator('admin')
@@ -353,10 +328,10 @@
             # TODO: implement this
             #action_logger(request.authuser, 'admin_revoked_repo_permissions',
             #              repo_name, request.ip_addr)
-            Session().commit()
+            meta.Session().commit()
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during revoking of permission'),
+            webutils.flash(_('An error occurred during revoking of permission'),
                     category='error')
             raise HTTPInternalServerError()
         return []
@@ -364,55 +339,55 @@
     @HasRepoPermissionLevelDecorator('admin')
     def edit_fields(self, repo_name):
         c.repo_info = self._load_repo()
-        c.repo_fields = RepositoryField.query() \
-            .filter(RepositoryField.repository == c.repo_info).all()
+        c.repo_fields = db.RepositoryField.query() \
+            .filter(db.RepositoryField.repository == c.repo_info).all()
         c.active = 'fields'
         if request.POST:
 
             raise HTTPFound(location=url('repo_edit_fields'))
-        return render('admin/repos/repo_edit.html')
+        return base.render('admin/repos/repo_edit.html')
 
     @HasRepoPermissionLevelDecorator('admin')
     def create_repo_field(self, repo_name):
         try:
             form_result = RepoFieldForm()().to_python(dict(request.POST))
-            new_field = RepositoryField()
-            new_field.repository = Repository.get_by_repo_name(repo_name)
+            new_field = db.RepositoryField()
+            new_field.repository = db.Repository.get_by_repo_name(repo_name)
             new_field.field_key = form_result['new_field_key']
             new_field.field_type = form_result['new_field_type']  # python type
             new_field.field_value = form_result['new_field_value']  # set initial blank value
             new_field.field_desc = form_result['new_field_desc']
             new_field.field_label = form_result['new_field_label']
-            Session().add(new_field)
-            Session().commit()
+            meta.Session().add(new_field)
+            meta.Session().commit()
         except formencode.Invalid as e:
-            h.flash(_('Field validation error: %s') % e.msg, category='error')
+            webutils.flash(_('Field validation error: %s') % e.msg, category='error')
         except Exception as e:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during creation of field: %r') % e, category='error')
+            webutils.flash(_('An error occurred during creation of field: %r') % e, category='error')
         raise HTTPFound(location=url('edit_repo_fields', repo_name=repo_name))
 
     @HasRepoPermissionLevelDecorator('admin')
     def delete_repo_field(self, repo_name, field_id):
-        field = RepositoryField.get_or_404(field_id)
+        field = db.RepositoryField.get_or_404(field_id)
         try:
-            Session().delete(field)
-            Session().commit()
+            meta.Session().delete(field)
+            meta.Session().commit()
         except Exception as e:
             log.error(traceback.format_exc())
             msg = _('An error occurred during removal of field')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
         raise HTTPFound(location=url('edit_repo_fields', repo_name=repo_name))
 
     @HasRepoPermissionLevelDecorator('admin')
     def edit_advanced(self, repo_name):
         c.repo_info = self._load_repo()
         c.default_user_id = kallithea.DEFAULT_USER_ID
-        c.in_public_journal = UserFollowing.query() \
-            .filter(UserFollowing.user_id == c.default_user_id) \
-            .filter(UserFollowing.follows_repository == c.repo_info).scalar()
+        c.in_public_journal = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == c.default_user_id) \
+            .filter(db.UserFollowing.follows_repository == c.repo_info).scalar()
 
-        _repos = Repository.query(sorted=True).all()
+        _repos = db.Repository.query(sorted=True).all()
         read_access_repos = RepoList(_repos, perm_level='read')
         c.repos_list = [(None, _('-- Not a fork --'))]
         c.repos_list += [(x.repo_id, x.repo_name)
@@ -428,7 +403,7 @@
         if request.POST:
             raise HTTPFound(location=url('repo_edit_advanced'))
         return htmlfill.render(
-            render('admin/repos/repo_edit.html'),
+            base.render('admin/repos/repo_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -443,14 +418,14 @@
         """
 
         try:
-            repo_id = Repository.get_by_repo_name(repo_name).repo_id
+            repo_id = db.Repository.get_by_repo_name(repo_name).repo_id
             user_id = kallithea.DEFAULT_USER_ID
             self.scm_model.toggle_following_repo(repo_id, user_id)
-            h.flash(_('Updated repository visibility in public journal'),
+            webutils.flash(_('Updated repository visibility in public journal'),
                     category='success')
-            Session().commit()
+            meta.Session().commit()
         except Exception:
-            h.flash(_('An error occurred during setting this'
+            webutils.flash(_('An error occurred during setting this'
                       ' repository in public journal'),
                     category='error')
         raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
@@ -467,15 +442,15 @@
             repo = ScmModel().mark_as_fork(repo_name, fork_id,
                                            request.authuser.username)
             fork = repo.fork.repo_name if repo.fork else _('Nothing')
-            Session().commit()
-            h.flash(_('Marked repository %s as fork of %s') % (repo_name, fork),
+            meta.Session().commit()
+            webutils.flash(_('Marked repository %s as fork of %s') % (repo_name, fork),
                     category='success')
         except RepositoryError as e:
             log.error(traceback.format_exc())
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
         except Exception as e:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during this operation'),
+            webutils.flash(_('An error occurred during this operation'),
                     category='error')
 
         raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
@@ -487,13 +462,13 @@
         if request.POST:
             try:
                 ScmModel().pull_changes(repo_name, request.authuser.username, request.ip_addr)
-                h.flash(_('Pulled from remote location'), category='success')
+                webutils.flash(_('Pulled from remote location'), category='success')
             except Exception as e:
                 log.error(traceback.format_exc())
-                h.flash(_('An error occurred during pull from remote location'),
+                webutils.flash(_('An error occurred during pull from remote location'),
                         category='error')
             raise HTTPFound(location=url('edit_repo_remote', repo_name=c.repo_name))
-        return render('admin/repos/repo_edit.html')
+        return base.render('admin/repos/repo_edit.html')
 
     @HasRepoPermissionLevelDecorator('admin')
     def edit_statistics(self, repo_name):
@@ -518,11 +493,11 @@
         if request.POST:
             try:
                 RepoModel().delete_stats(repo_name)
-                Session().commit()
+                meta.Session().commit()
             except Exception as e:
                 log.error(traceback.format_exc())
-                h.flash(_('An error occurred during deletion of repository stats'),
+                webutils.flash(_('An error occurred during deletion of repository stats'),
                         category='error')
             raise HTTPFound(location=url('edit_repo_statistics', repo_name=c.repo_name))
 
-        return render('admin/repos/repo_edit.html')
+        return base.render('admin/repos/repo_edit.html')
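
As a small aid to reading the repos.py hunks above, a hypothetical read-only helper showing the namespaced model access pattern (db.Repository / db.RepositoryField via kallithea.model.db); the helper name is invented and the snippet assumes an initialized Kallithea database session:

    from kallithea.model import db


    def list_repo_fields(repo_name):
        # Look up the repository and return its extra fields, mirroring the
        # query style used in edit_fields()/create_repo_field() above.
        repo = db.Repository.get_by_repo_name(repo_name)
        return db.RepositoryField.query() \
            .filter(db.RepositoryField.repository == repo).all()
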
--- a/kallithea/controllers/admin/settings.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/settings.py	Thu May 27 21:27:37 2021 +0200
@@ -35,18 +35,17 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+import kallithea
+import kallithea.lib.indexers.daemon
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
-from kallithea.lib.celerylib import tasks
-from kallithea.lib.exceptions import HgsubversionImportError
 from kallithea.lib.utils import repo2db_mapper, set_app_settings
 from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs import VCSError
-from kallithea.model.db import Repository, Setting, Ui
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta, notification
 from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm
-from kallithea.model.meta import Session
 from kallithea.model.notification import EmailNotificationModel
 from kallithea.model.scm import ScmModel
 
@@ -54,19 +53,14 @@
 log = logging.getLogger(__name__)
 
 
-class SettingsController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
-    # To properly map this controller, ensure your config/routing.py
-    # file has a resource setup:
-    #     map.resource('setting', 'settings', controller='admin/settings',
-    #         path_prefix='/admin', name_prefix='admin_')
+class SettingsController(base.BaseController):
 
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
         super(SettingsController, self)._before(*args, **kwargs)
 
     def _get_hg_ui_settings(self):
-        ret = Ui.query().all()
+        ret = db.Ui.query().all()
 
         settings = {}
         for each in ret:
@@ -92,7 +86,7 @@
                 form_result = application_form.to_python(dict(request.POST))
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                     render('admin/settings/settings.html'),
+                     base.render('admin/settings/settings.html'),
                      defaults=errors.value,
                      errors=errors.error_dict or {},
                      prefix_error=False,
@@ -101,52 +95,37 @@
 
             try:
                 if c.visual.allow_repo_location_change:
-                    sett = Ui.get_by_key('paths', '/')
+                    sett = db.Ui.get_by_key('paths', '/')
                     sett.ui_value = form_result['paths_root_path']
 
                 # HOOKS
-                sett = Ui.get_by_key('hooks', Ui.HOOK_UPDATE)
-                sett.ui_active = form_result['hooks_changegroup_update']
+                sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE)
+                sett.ui_active = form_result['hooks_changegroup_kallithea_update']
 
-                sett = Ui.get_by_key('hooks', Ui.HOOK_REPO_SIZE)
-                sett.ui_active = form_result['hooks_changegroup_repo_size']
+                sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_REPO_SIZE)
+                sett.ui_active = form_result['hooks_changegroup_kallithea_repo_size']
 
                 ## EXTENSIONS
-                sett = Ui.get_or_create('extensions', 'largefiles')
+                sett = db.Ui.get_or_create('extensions', 'largefiles')
                 sett.ui_active = form_result['extensions_largefiles']
 
-                sett = Ui.get_or_create('extensions', 'hgsubversion')
-                sett.ui_active = form_result['extensions_hgsubversion']
-                if sett.ui_active:
-                    try:
-                        import hgsubversion  # pragma: no cover
-                        assert hgsubversion
-                    except ImportError:
-                        raise HgsubversionImportError
-
-#                sett = Ui.get_or_create('extensions', 'hggit')
+#                sett = db.Ui.get_or_create('extensions', 'hggit')
 #                sett.ui_active = form_result['extensions_hggit']
 
-                Session().commit()
-
-                h.flash(_('Updated VCS settings'), category='success')
+                meta.Session().commit()
 
-            except HgsubversionImportError:
-                log.error(traceback.format_exc())
-                h.flash(_('Unable to activate hgsubversion support. '
-                          'The "hgsubversion" library is missing'),
-                        category='error')
+                webutils.flash(_('Updated VCS settings'), category='success')
 
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred while updating '
+                webutils.flash(_('Error occurred while updating '
                           'application settings'), category='error')
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -168,33 +147,33 @@
                                             install_git_hooks=install_git_hooks,
                                             user=request.authuser.username,
                                             overwrite_git_hooks=overwrite_git_hooks)
-            added_msg = h.HTML(', ').join(
-                h.link_to(safe_str(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
+            added_msg = webutils.HTML(', ').join(
+                webutils.link_to(safe_str(repo_name), webutils.url('summary_home', repo_name=repo_name)) for repo_name in added
             ) or '-'
-            removed_msg = h.HTML(', ').join(
+            removed_msg = webutils.HTML(', ').join(
                 safe_str(repo_name) for repo_name in removed
             ) or '-'
-            h.flash(h.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
+            webutils.flash(webutils.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
                     (added_msg, removed_msg), category='success')
 
             if invalidate_cache:
                 log.debug('invalidating all repositories cache')
                 i = 0
-                for repo in Repository.query():
+                for repo in db.Repository.query():
                     try:
                         ScmModel().mark_for_invalidation(repo.repo_name)
                         i += 1
                     except VCSError as e:
                         log.warning('VCS error invalidating %s: %s', repo.repo_name, e)
-                h.flash(_('Invalidated %s repositories') % i, category='success')
+                webutils.flash(_('Invalidated %s repositories') % i, category='success')
 
             raise HTTPFound(location=url('admin_settings_mapping'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -208,7 +187,7 @@
                 form_result = application_form.to_python(dict(request.POST))
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('admin/settings/settings.html'),
+                    base.render('admin/settings/settings.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -223,25 +202,25 @@
                     'captcha_public_key',
                     'captcha_private_key',
                 ):
-                    Setting.create_or_update(setting, form_result[setting])
+                    db.Setting.create_or_update(setting, form_result[setting])
 
-                Session().commit()
+                meta.Session().commit()
                 set_app_settings(config)
-                h.flash(_('Updated application settings'), category='success')
+                webutils.flash(_('Updated application settings'), category='success')
 
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred while updating '
+                webutils.flash(_('Error occurred while updating '
                           'application settings'),
                           category='error')
 
             raise HTTPFound(location=url('admin_settings_global'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -255,7 +234,7 @@
                 form_result = application_form.to_python(dict(request.POST))
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('admin/settings/settings.html'),
+                    base.render('admin/settings/settings.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -277,26 +256,26 @@
                     ('clone_ssh_tmpl', 'clone_ssh_tmpl', 'unicode'),
                 ]
                 for setting, form_key, type_ in settings:
-                    Setting.create_or_update(setting, form_result[form_key], type_)
+                    db.Setting.create_or_update(setting, form_result[form_key], type_)
 
-                Session().commit()
+                meta.Session().commit()
                 set_app_settings(config)
-                h.flash(_('Updated visualisation settings'),
+                webutils.flash(_('Updated visualisation settings'),
                         category='success')
 
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during updating '
+                webutils.flash(_('Error occurred during updating '
                           'visualisation settings'),
                         category='error')
 
             raise HTTPFound(location=url('admin_settings_visual'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -310,7 +289,7 @@
             test_body = ('Kallithea Email test, '
                                'Kallithea version: %s' % c.kallithea_version)
             if not test_email:
-                h.flash(_('Please enter email address'), category='error')
+                webutils.flash(_('Please enter email address'), category='error')
                 raise HTTPFound(location=url('admin_settings_email'))
 
             test_email_txt_body = EmailNotificationModel() \
@@ -322,20 +301,19 @@
 
             recipients = [test_email] if test_email else None
 
-            tasks.send_email(recipients, test_email_subj,
+            notification.send_email(recipients, test_email_subj,
                              test_email_txt_body, test_email_html_body)
 
-            h.flash(_('Send email task created'), category='success')
+            webutils.flash(_('Send email task created'), category='success')
             raise HTTPFound(location=url('admin_settings_email'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
-        import kallithea
         c.ini = kallithea.CONFIG
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -352,16 +330,16 @@
 
                 try:
                     ui_key = ui_key and ui_key.strip()
-                    if ui_key in (x.ui_key for x in Ui.get_custom_hooks()):
-                        h.flash(_('Hook already exists'), category='error')
-                    elif ui_key in (x.ui_key for x in Ui.get_builtin_hooks()):
-                        h.flash(_('Builtin hooks are read-only. Please use another hook name.'), category='error')
+                    if ui_key in (x.ui_key for x in db.Ui.get_custom_hooks()):
+                        webutils.flash(_('Hook already exists'), category='error')
+                    elif ui_key and '.kallithea_' in ui_key:
+                        webutils.flash(_('Hook names with ".kallithea_" are reserved for internal use. Please use another hook name.'), category='error')
                     elif ui_value and ui_key:
-                        Ui.create_or_update_hook(ui_key, ui_value)
-                        h.flash(_('Added new hook'), category='success')
+                        db.Ui.create_or_update_hook(ui_key, ui_value)
+                        webutils.flash(_('Added new hook'), category='success')
                     elif hook_id:
-                        Ui.delete(hook_id)
-                        Session().commit()
+                        db.Ui.delete(hook_id)
+                        meta.Session().commit()
 
                     # check for edits
                     update = False
@@ -370,27 +348,26 @@
                                         _d.get('hook_ui_value_new', []),
                                         _d.get('hook_ui_value', [])):
                         if v != ov:
-                            Ui.create_or_update_hook(k, v)
+                            db.Ui.create_or_update_hook(k, v)
                             update = True
 
                     if update:
-                        h.flash(_('Updated hooks'), category='success')
-                    Session().commit()
+                        webutils.flash(_('Updated hooks'), category='success')
+                    meta.Session().commit()
                 except Exception:
                     log.error(traceback.format_exc())
-                    h.flash(_('Error occurred during hook creation'),
+                    webutils.flash(_('Error occurred during hook creation'),
                             category='error')
 
                 raise HTTPFound(location=url('admin_settings_hooks'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
-        c.hooks = Ui.get_builtin_hooks()
-        c.custom_hooks = Ui.get_custom_hooks()
+        c.custom_hooks = db.Ui.get_custom_hooks()
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -401,15 +378,15 @@
         if request.POST:
             repo_location = self._get_hg_ui_settings()['paths_root_path']
             full_index = request.POST.get('full_index', False)
-            tasks.whoosh_index(repo_location, full_index)
-            h.flash(_('Whoosh reindex task scheduled'), category='success')
+            kallithea.lib.indexers.daemon.whoosh_index(repo_location, full_index)
+            webutils.flash(_('Whoosh reindex task scheduled'), category='success')
             raise HTTPFound(location=url('admin_settings_search'))
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -418,17 +395,16 @@
     def settings_system(self):
         c.active = 'system'
 
-        defaults = Setting.get_app_settings()
+        defaults = db.Setting.get_app_settings()
         defaults.update(self._get_hg_ui_settings())
 
-        import kallithea
         c.ini = kallithea.CONFIG
-        server_info = Setting.get_server_info()
+        server_info = db.Setting.get_server_info()
         for key, val in server_info.items():
             setattr(c, key, val)
 
         return htmlfill.render(
-            render('admin/settings/settings.html'),
+            base.render('admin/settings/settings.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
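
The settings.py hunks above repeatedly pair db.Setting.create_or_update with a session commit. A minimal, hypothetical sketch of that pattern (setting keys taken from the hunk above; assumes an initialized Kallithea database session):

    from kallithea.model import db, meta


    def set_captcha_keys(public_key, private_key):
        # Persist two application settings and commit them in one transaction.
        db.Setting.create_or_update('captcha_public_key', public_key)
        db.Setting.create_or_update('captcha_private_key', private_key)
        meta.Session().commit()
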
--- a/kallithea/controllers/admin/user_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/user_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -37,16 +37,15 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound, HTTPInternalServerError
 
-from kallithea.config.routing import url
-from kallithea.lib import helpers as h
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseController, render
 from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
-from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import safe_int, safe_str
-from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta, userlog
 from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm
-from kallithea.model.meta import Session
 from kallithea.model.scm import UserGroupList
 from kallithea.model.user_group import UserGroupModel
 
@@ -54,8 +53,7 @@
 log = logging.getLogger(__name__)
 
 
-class UserGroupsController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
+class UserGroupsController(base.BaseController):
 
     @LoginRequired(allow_default_user=True)
     def _before(self, *args, **kwargs):
@@ -67,7 +65,7 @@
 
         c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
         c.available_members = sorted(((x.user_id, x.username) for x in
-                                      User.query().all()),
+                                      db.User.query().all()),
                                      key=lambda u: u[1].lower())
 
     def __load_defaults(self, user_group_id):
@@ -76,13 +74,13 @@
 
         :param user_group_id:
         """
-        user_group = UserGroup.get_or_404(user_group_id)
+        user_group = db.UserGroup.get_or_404(user_group_id)
         data = user_group.get_dict()
         return data
 
     def index(self, format='html'):
-        _list = UserGroup.query() \
-                        .order_by(func.lower(UserGroup.users_group_name)) \
+        _list = db.UserGroup.query() \
+                        .order_by(func.lower(db.UserGroup.users_group_name)) \
                         .all()
         group_iter = UserGroupList(_list, perm_level='admin')
         user_groups_data = []
@@ -91,21 +89,21 @@
 
         def user_group_name(user_group_id, user_group_name):
             return template.get_def("user_group_name") \
-                .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+                .render_unicode(user_group_id, user_group_name, _=_, webutils=webutils, c=c)
 
         def user_group_actions(user_group_id, user_group_name):
             return template.get_def("user_group_actions") \
-                .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+                .render_unicode(user_group_id, user_group_name, _=_, webutils=webutils, c=c)
 
         for user_gr in group_iter:
             user_groups_data.append({
                 "raw_name": user_gr.users_group_name,
                 "group_name": user_group_name(user_gr.users_group_id,
                                               user_gr.users_group_name),
-                "desc": h.escape(user_gr.user_group_description),
+                "desc": webutils.escape(user_gr.user_group_description),
                 "members": len(user_gr.members),
                 "active": h.boolicon(user_gr.users_group_active),
-                "owner": h.person(user_gr.owner.username),
+                "owner": user_gr.owner.username,
                 "action": user_group_actions(user_gr.users_group_id, user_gr.users_group_name)
             })
 
@@ -115,7 +113,7 @@
             "records": user_groups_data
         }
 
-        return render('admin/user_groups/user_groups.html')
+        return base.render('admin/user_groups/user_groups.html')
 
     @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
     def create(self):
@@ -128,15 +126,15 @@
                                          active=form_result['users_group_active'])
 
             gr = form_result['users_group_name']
-            action_logger(request.authuser,
+            userlog.action_logger(request.authuser,
                           'admin_created_users_group:%s' % gr,
                           None, request.ip_addr)
-            h.flash(h.HTML(_('Created user group %s')) % h.link_to(gr, url('edit_users_group', id=ug.users_group_id)),
+            webutils.flash(webutils.HTML(_('Created user group %s')) % webutils.link_to(gr, url('edit_users_group', id=ug.users_group_id)),
                 category='success')
-            Session().commit()
+            meta.Session().commit()
         except formencode.Invalid as errors:
             return htmlfill.render(
-                render('admin/user_groups/user_group_add.html'),
+                base.render('admin/user_groups/user_group_add.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -144,18 +142,18 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during creation of user group %s')
+            webutils.flash(_('Error occurred during creation of user group %s')
                     % request.POST.get('users_group_name'), category='error')
 
         raise HTTPFound(location=url('users_groups'))
 
     @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
     def new(self, format='html'):
-        return render('admin/user_groups/user_group_add.html')
+        return base.render('admin/user_groups/user_group_add.html')
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def update(self, id):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'settings'
         self.__load_data(id)
 
@@ -169,11 +167,11 @@
             form_result = users_group_form.to_python(request.POST)
             UserGroupModel().update(c.user_group, form_result)
             gr = form_result['users_group_name']
-            action_logger(request.authuser,
+            userlog.action_logger(request.authuser,
                           'admin_updated_users_group:%s' % gr,
                           None, request.ip_addr)
-            h.flash(_('Updated user group %s') % gr, category='success')
-            Session().commit()
+            webutils.flash(_('Updated user group %s') % gr, category='success')
+            meta.Session().commit()
         except formencode.Invalid as errors:
             ug_model = UserGroupModel()
             defaults = errors.value
@@ -186,7 +184,7 @@
             })
 
             return htmlfill.render(
-                render('admin/user_groups/user_group_edit.html'),
+                base.render('admin/user_groups/user_group_edit.html'),
                 defaults=defaults,
                 errors=e,
                 prefix_error=False,
@@ -194,36 +192,36 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during update of user group %s')
+            webutils.flash(_('Error occurred during update of user group %s')
                     % request.POST.get('users_group_name'), category='error')
 
         raise HTTPFound(location=url('edit_users_group', id=id))
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def delete(self, id):
-        usr_gr = UserGroup.get_or_404(id)
+        usr_gr = db.UserGroup.get_or_404(id)
         try:
             UserGroupModel().delete(usr_gr)
-            Session().commit()
-            h.flash(_('Successfully deleted user group'), category='success')
+            meta.Session().commit()
+            webutils.flash(_('Successfully deleted user group'), category='success')
         except UserGroupsAssignedException as e:
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during deletion of user group'),
+            webutils.flash(_('An error occurred during deletion of user group'),
                     category='error')
         raise HTTPFound(location=url('users_groups'))
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def edit(self, id, format='html'):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'settings'
         self.__load_data(id)
 
         defaults = self.__load_defaults(id)
 
         return htmlfill.render(
-            render('admin/user_groups/user_group_edit.html'),
+            base.render('admin/user_groups/user_group_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -231,7 +229,7 @@
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def edit_perms(self, id):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'perms'
 
         defaults = {}
@@ -245,7 +243,7 @@
                              p.permission.permission_name})
 
         return htmlfill.render(
-            render('admin/user_groups/user_group_edit.html'),
+            base.render('admin/user_groups/user_group_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -258,7 +256,7 @@
 
         :param id:
         """
-        user_group = UserGroup.get_or_404(id)
+        user_group = db.UserGroup.get_or_404(id)
         form = UserGroupPermsForm()().to_python(request.POST)
 
         # set the permissions !
@@ -266,13 +264,13 @@
             UserGroupModel()._update_permissions(user_group, form['perms_new'],
                                                  form['perms_updates'])
         except RepoGroupAssignmentError:
-            h.flash(_('Target group cannot be the same'), category='error')
+            webutils.flash(_('Target group cannot be the same'), category='error')
             raise HTTPFound(location=url('edit_user_group_perms', id=id))
         # TODO: implement this
         #action_logger(request.authuser, 'admin_changed_repo_permissions',
         #              repo_name, request.ip_addr)
-        Session().commit()
-        h.flash(_('User group permissions updated'), category='success')
+        meta.Session().commit()
+        webutils.flash(_('User group permissions updated'), category='success')
         raise HTTPFound(location=url('edit_user_group_perms', id=id))
 
     @HasUserGroupPermissionLevelDecorator('admin')
@@ -288,7 +286,7 @@
             if not request.authuser.is_admin:
                 if obj_type == 'user' and request.authuser.user_id == obj_id:
                     msg = _('Cannot revoke permission for yourself as admin')
-                    h.flash(msg, category='warning')
+                    webutils.flash(msg, category='warning')
                     raise Exception('revoke admin permission on self')
             if obj_type == 'user':
                 UserGroupModel().revoke_user_permission(user_group=id,
@@ -296,36 +294,36 @@
             elif obj_type == 'user_group':
                 UserGroupModel().revoke_user_group_permission(target_user_group=id,
                                                               user_group=obj_id)
-            Session().commit()
+            meta.Session().commit()
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during revoking of permission'),
+            webutils.flash(_('An error occurred during revoking of permission'),
                     category='error')
             raise HTTPInternalServerError()
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def edit_default_perms(self, id):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'default_perms'
 
         permissions = {
             'repositories': {},
             'repositories_groups': {}
         }
-        ugroup_repo_perms = UserGroupRepoToPerm.query() \
-            .options(joinedload(UserGroupRepoToPerm.permission)) \
-            .options(joinedload(UserGroupRepoToPerm.repository)) \
-            .filter(UserGroupRepoToPerm.users_group_id == id) \
+        ugroup_repo_perms = db.UserGroupRepoToPerm.query() \
+            .options(joinedload(db.UserGroupRepoToPerm.permission)) \
+            .options(joinedload(db.UserGroupRepoToPerm.repository)) \
+            .filter(db.UserGroupRepoToPerm.users_group_id == id) \
             .all()
 
         for gr in ugroup_repo_perms:
             permissions['repositories'][gr.repository.repo_name]  \
                 = gr.permission.permission_name
 
-        ugroup_group_perms = UserGroupRepoGroupToPerm.query() \
-            .options(joinedload(UserGroupRepoGroupToPerm.permission)) \
-            .options(joinedload(UserGroupRepoGroupToPerm.group)) \
-            .filter(UserGroupRepoGroupToPerm.users_group_id == id) \
+        ugroup_group_perms = db.UserGroupRepoGroupToPerm.query() \
+            .options(joinedload(db.UserGroupRepoGroupToPerm.permission)) \
+            .options(joinedload(db.UserGroupRepoGroupToPerm.group)) \
+            .filter(db.UserGroupRepoGroupToPerm.users_group_id == id) \
             .all()
 
         for gr in ugroup_group_perms:
@@ -346,7 +344,7 @@
         })
 
         return htmlfill.render(
-            render('admin/user_groups/user_group_edit.html'),
+            base.render('admin/user_groups/user_group_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False
@@ -354,7 +352,7 @@
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def update_default_perms(self, id):
-        user_group = UserGroup.get_or_404(id)
+        user_group = db.UserGroup.get_or_404(id)
 
         try:
             form = CustomDefaultPermissionsForm()()
@@ -362,11 +360,11 @@
 
             usergroup_model = UserGroupModel()
 
-            defs = UserGroupToPerm.query() \
-                .filter(UserGroupToPerm.users_group == user_group) \
+            defs = db.UserGroupToPerm.query() \
+                .filter(db.UserGroupToPerm.users_group == user_group) \
                 .all()
             for ug in defs:
-                Session().delete(ug)
+                meta.Session().delete(ug)
 
             if form_result['create_repo_perm']:
                 usergroup_model.grant_perm(id, 'hg.create.repository')
@@ -381,29 +379,29 @@
             else:
                 usergroup_model.grant_perm(id, 'hg.fork.none')
 
-            h.flash(_("Updated permissions"), category='success')
-            Session().commit()
+            webutils.flash(_("Updated permissions"), category='success')
+            meta.Session().commit()
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during permissions saving'),
+            webutils.flash(_('An error occurred during permissions saving'),
                     category='error')
 
         raise HTTPFound(location=url('edit_user_group_default_perms', id=id))
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def edit_advanced(self, id):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'advanced'
         c.group_members_obj = sorted((x.user for x in c.user_group.members),
                                      key=lambda u: u.username.lower())
-        return render('admin/user_groups/user_group_edit.html')
+        return base.render('admin/user_groups/user_group_edit.html')
 
     @HasUserGroupPermissionLevelDecorator('admin')
     def edit_members(self, id):
-        c.user_group = UserGroup.get_or_404(id)
+        c.user_group = db.UserGroup.get_or_404(id)
         c.active = 'members'
         c.group_members_obj = sorted((x.user for x in c.user_group.members),
                                      key=lambda u: u.username.lower())
 
         c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
-        return render('admin/user_groups/user_group_edit.html')
+        return base.render('admin/user_groups/user_group_edit.html')
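
Finally, a hypothetical sketch of the get_or_404 plus base.render/htmlfill combination that the user_groups.py hunks above use for the edit pages. In the real controllers the template also expects tmpl_context attributes (c.user_group, c.active) to be set first; this trimmed version only shows the call shape and assumes a Kallithea request context:

    from formencode import htmlfill

    from kallithea.controllers import base
    from kallithea.model import db


    def render_user_group_settings(user_group_id):
        # 404 if the group does not exist, then prefill the edit form.
        user_group = db.UserGroup.get_or_404(user_group_id)
        defaults = user_group.get_dict()
        return htmlfill.render(
            base.render('admin/user_groups/user_group_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
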
--- a/kallithea/controllers/admin/users.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/admin/users.py	Thu May 27 21:27:37 2021 +0200
@@ -37,18 +37,16 @@
 from webob.exc import HTTPFound, HTTPNotFound
 
 import kallithea
-from kallithea.config.routing import url
-from kallithea.lib import auth_modules
-from kallithea.lib import helpers as h
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.lib import auth_modules, webutils
 from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired
-from kallithea.lib.base import BaseController, IfSshEnabled, render
 from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException
-from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int
+from kallithea.lib.webutils import fmt_date, url
+from kallithea.model import db, meta, userlog
 from kallithea.model.api_key import ApiKeyModel
-from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm
 from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm
-from kallithea.model.meta import Session
 from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException
 from kallithea.model.user import UserModel
 
@@ -56,8 +54,7 @@
 log = logging.getLogger(__name__)
 
 
-class UsersController(BaseController):
-    """REST Controller styled on the Atom Publishing Protocol"""
+class UsersController(base.BaseController):
 
     @LoginRequired()
     @HasPermissionAnyDecorator('hg.admin')
@@ -65,9 +62,9 @@
         super(UsersController, self)._before(*args, **kwargs)
 
     def index(self, format='html'):
-        c.users_list = User.query().order_by(User.username) \
+        c.users_list = db.User.query().order_by(db.User.username) \
                         .filter_by(is_default_user=False) \
-                        .order_by(func.lower(User.username)) \
+                        .order_by(func.lower(db.User.username)) \
                         .all()
 
         users_data = []
@@ -78,20 +75,20 @@
 
         def username(user_id, username):
             return template.get_def("user_name") \
-                .render_unicode(user_id, username, _=_, h=h, c=c)
+                .render_unicode(user_id, username, _=_, webutils=webutils, c=c)
 
         def user_actions(user_id, username):
             return template.get_def("user_actions") \
-                .render_unicode(user_id, username, _=_, h=h, c=c)
+                .render_unicode(user_id, username, _=_, webutils=webutils, c=c)
 
         for user in c.users_list:
             users_data.append({
                 "gravatar": grav_tmpl % h.gravatar(user.email, size=20),
                 "raw_name": user.username,
                 "username": username(user.user_id, user.username),
-                "firstname": h.escape(user.name),
-                "lastname": h.escape(user.lastname),
-                "last_login": h.fmt_date(user.last_login),
+                "firstname": webutils.escape(user.name),
+                "lastname": webutils.escape(user.lastname),
+                "last_login": fmt_date(user.last_login),
                 "last_login_raw": datetime_to_time(user.last_login),
                 "active": h.boolicon(user.active),
                 "admin": h.boolicon(user.admin),
@@ -106,41 +103,41 @@
             "records": users_data
         }
 
-        return render('admin/users/users.html')
+        return base.render('admin/users/users.html')
 
     def create(self):
-        c.default_extern_type = User.DEFAULT_AUTH_TYPE
+        c.default_extern_type = db.User.DEFAULT_AUTH_TYPE
         c.default_extern_name = ''
         user_model = UserModel()
         user_form = UserForm()()
         try:
             form_result = user_form.to_python(dict(request.POST))
             user = user_model.create(form_result)
-            action_logger(request.authuser, 'admin_created_user:%s' % user.username,
+            userlog.action_logger(request.authuser, 'admin_created_user:%s' % user.username,
                           None, request.ip_addr)
-            h.flash(_('Created user %s') % user.username,
+            webutils.flash(_('Created user %s') % user.username,
                     category='success')
-            Session().commit()
+            meta.Session().commit()
         except formencode.Invalid as errors:
             return htmlfill.render(
-                render('admin/users/user_add.html'),
+                base.render('admin/users/user_add.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
                 encoding="UTF-8",
                 force_defaults=False)
         except UserCreationError as e:
-            h.flash(e, 'error')
+            webutils.flash(e, 'error')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during creation of user %s')
+            webutils.flash(_('Error occurred during creation of user %s')
                     % request.POST.get('username'), category='error')
         raise HTTPFound(location=url('edit_user', id=user.user_id))
 
     def new(self, format='html'):
-        c.default_extern_type = User.DEFAULT_AUTH_TYPE
+        c.default_extern_type = db.User.DEFAULT_AUTH_TYPE
         c.default_extern_name = ''
-        return render('admin/users/user_add.html')
+        return base.render('admin/users/user_add.html')
 
     def update(self, id):
         user_model = UserModel()
@@ -155,10 +152,10 @@
 
             user_model.update(id, form_result, skip_attrs=skip_attrs)
             usr = form_result['username']
-            action_logger(request.authuser, 'admin_updated_user:%s' % usr,
+            userlog.action_logger(request.authuser, 'admin_updated_user:%s' % usr,
                           None, request.ip_addr)
-            h.flash(_('User updated successfully'), category='success')
-            Session().commit()
+            webutils.flash(_('User updated successfully'), category='success')
+            meta.Session().commit()
         except formencode.Invalid as errors:
             defaults = errors.value
             e = errors.error_dict or {}
@@ -176,29 +173,33 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('Error occurred during update of user %s')
+            webutils.flash(_('Error occurred during update of user %s')
                     % form_result.get('username'), category='error')
         raise HTTPFound(location=url('edit_user', id=id))
 
     def delete(self, id):
-        usr = User.get_or_404(id)
+        usr = db.User.get_or_404(id)
+        has_ssh_keys = bool(usr.ssh_keys)
         try:
             UserModel().delete(usr)
-            Session().commit()
-            h.flash(_('Successfully deleted user'), category='success')
+            meta.Session().commit()
+            webutils.flash(_('Successfully deleted user'), category='success')
         except (UserOwnsReposException, DefaultUserException) as e:
-            h.flash(e, category='warning')
+            webutils.flash(e, category='warning')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during deletion of user'),
+            webutils.flash(_('An error occurred during deletion of user'),
                     category='error')
+        else:
+            if has_ssh_keys:
+                SshKeyModel().write_authorized_keys()
         raise HTTPFound(location=url('users'))
 
     def _get_user_or_raise_if_default(self, id):
         try:
-            return User.get_or_404(id, allow_default=False)
+            return db.User.get_or_404(id, allow_default=False)
         except DefaultUserException:
-            h.flash(_("The default user cannot be edited"), category='warning')
+            webutils.flash(_("The default user cannot be edited"), category='warning')
             raise HTTPNotFound
 
     def _render_edit_profile(self, user):
@@ -207,7 +208,7 @@
         c.perm_user = AuthUser(dbuser=user)
         managed_fields = auth_modules.get_managed_fields(user)
         c.readonly = lambda n: 'readonly' if n in managed_fields else None
-        return render('admin/users/user_edit.html')
+        return base.render('admin/users/user_edit.html')
 
     def edit(self, id, format='html'):
         user = self._get_user_or_raise_if_default(id)
@@ -233,7 +234,7 @@
             'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
         })
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -254,7 +255,7 @@
                                                      show_expired=show_expired)
         defaults = c.user.get_dict()
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -265,8 +266,8 @@
         lifetime = safe_int(request.POST.get('lifetime'), -1)
         description = request.POST.get('description')
         ApiKeyModel().create(c.user.user_id, description, lifetime)
-        Session().commit()
-        h.flash(_("API key successfully created"), category='success')
+        meta.Session().commit()
+        webutils.flash(_("API key successfully created"), category='success')
         raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
 
     def delete_api_key(self, id):
@@ -275,12 +276,12 @@
         api_key = request.POST.get('del_api_key')
         if request.POST.get('del_api_key_builtin'):
             c.user.api_key = generate_api_key()
-            Session().commit()
-            h.flash(_("API key successfully reset"), category='success')
+            meta.Session().commit()
+            webutils.flash(_("API key successfully reset"), category='success')
         elif api_key:
             ApiKeyModel().delete(api_key, c.user.user_id)
-            Session().commit()
-            h.flash(_("API key successfully deleted"), category='success')
+            meta.Session().commit()
+            webutils.flash(_("API key successfully deleted"), category='success')
 
         raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
 
@@ -301,7 +302,7 @@
             'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
         })
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -315,11 +316,11 @@
 
             user_model = UserModel()
 
-            defs = UserToPerm.query() \
-                .filter(UserToPerm.user == user) \
+            defs = db.UserToPerm.query() \
+                .filter(db.UserToPerm.user == user) \
                 .all()
             for ug in defs:
-                Session().delete(ug)
+                meta.Session().delete(ug)
 
             if form_result['create_repo_perm']:
                 user_model.grant_perm(id, 'hg.create.repository')
@@ -333,23 +334,23 @@
                 user_model.grant_perm(id, 'hg.fork.repository')
             else:
                 user_model.grant_perm(id, 'hg.fork.none')
-            h.flash(_("Updated permissions"), category='success')
-            Session().commit()
+            webutils.flash(_("Updated permissions"), category='success')
+            meta.Session().commit()
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during permissions saving'),
+            webutils.flash(_('An error occurred during permissions saving'),
                     category='error')
         raise HTTPFound(location=url('edit_user_perms', id=id))
 
     def edit_emails(self, id):
         c.user = self._get_user_or_raise_if_default(id)
         c.active = 'emails'
-        c.user_email_map = UserEmailMap.query() \
-            .filter(UserEmailMap.user == c.user).all()
+        c.user_email_map = db.UserEmailMap.query() \
+            .filter(db.UserEmailMap.user == c.user).all()
 
         defaults = c.user.get_dict()
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -361,14 +362,14 @@
 
         try:
             user_model.add_extra_email(id, email)
-            Session().commit()
-            h.flash(_("Added email %s to user") % email, category='success')
+            meta.Session().commit()
+            webutils.flash(_("Added email %s to user") % email, category='success')
         except formencode.Invalid as error:
             msg = error.error_dict['email']
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during email saving'),
+            webutils.flash(_('An error occurred during email saving'),
                     category='error')
         raise HTTPFound(location=url('edit_user_emails', id=id))
 
@@ -377,22 +378,22 @@
         email_id = request.POST.get('del_email_id')
         user_model = UserModel()
         user_model.delete_extra_email(id, email_id)
-        Session().commit()
-        h.flash(_("Removed email from user"), category='success')
+        meta.Session().commit()
+        webutils.flash(_("Removed email from user"), category='success')
         raise HTTPFound(location=url('edit_user_emails', id=id))
 
     def edit_ips(self, id):
         c.user = self._get_user_or_raise_if_default(id)
         c.active = 'ips'
-        c.user_ip_map = UserIpMap.query() \
-            .filter(UserIpMap.user == c.user).all()
+        c.user_ip_map = db.UserIpMap.query() \
+            .filter(db.UserIpMap.user == c.user).all()
 
-        c.default_user_ip_map = UserIpMap.query() \
-            .filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all()
+        c.default_user_ip_map = db.UserIpMap.query() \
+            .filter(db.UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all()
 
         defaults = c.user.get_dict()
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -403,14 +404,14 @@
 
         try:
             user_model.add_extra_ip(id, ip)
-            Session().commit()
-            h.flash(_("Added IP address %s to user whitelist") % ip, category='success')
+            meta.Session().commit()
+            webutils.flash(_("Added IP address %s to user whitelist") % ip, category='success')
         except formencode.Invalid as error:
             msg = error.error_dict['ip']
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred while adding IP address'),
+            webutils.flash(_('An error occurred while adding IP address'),
                     category='error')
 
         if 'default_user' in request.POST:
@@ -421,26 +422,26 @@
         ip_id = request.POST.get('del_ip_id')
         user_model = UserModel()
         user_model.delete_extra_ip(id, ip_id)
-        Session().commit()
-        h.flash(_("Removed IP address from user whitelist"), category='success')
+        meta.Session().commit()
+        webutils.flash(_("Removed IP address from user whitelist"), category='success')
 
         if 'default_user' in request.POST:
             raise HTTPFound(location=url('admin_permissions_ips'))
         raise HTTPFound(location=url('edit_user_ips', id=id))
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def edit_ssh_keys(self, id):
         c.user = self._get_user_or_raise_if_default(id)
         c.active = 'ssh_keys'
         c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
         defaults = c.user.get_dict()
         return htmlfill.render(
-            render('admin/users/user_edit.html'),
+            base.render('admin/users/user_edit.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def ssh_keys_add(self, id):
         c.user = self._get_user_or_raise_if_default(id)
 
@@ -449,23 +450,23 @@
         try:
             new_ssh_key = SshKeyModel().create(c.user.user_id,
                                                description, public_key)
-            Session().commit()
+            meta.Session().commit()
             SshKeyModel().write_authorized_keys()
-            h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
+            webutils.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
         except SshKeyModelException as e:
-            h.flash(e.args[0], category='error')
+            webutils.flash(e.args[0], category='error')
         raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
 
-    @IfSshEnabled
+    @base.IfSshEnabled
     def ssh_keys_delete(self, id):
         c.user = self._get_user_or_raise_if_default(id)
 
         fingerprint = request.POST.get('del_public_key_fingerprint')
         try:
             SshKeyModel().delete(fingerprint, c.user.user_id)
-            Session().commit()
+            meta.Session().commit()
             SshKeyModel().write_authorized_keys()
-            h.flash(_("SSH key successfully deleted"), category='success')
+            webutils.flash(_("SSH key successfully deleted"), category='success')
         except SshKeyModelException as e:
-            h.flash(e.args[0], category='error')
+            webutils.flash(e.args[0], category='error')
         raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
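
The delete() change above records has_ssh_keys before the user row is removed and rewrites the authorized_keys file only in the else branch, i.e. only when no exception was raised. Reading usr.ssh_keys up front is presumably needed because the relationship can no longer be queried reliably once the row has been deleted. A minimal sketch of that try/except/else shape; the functions below are placeholders, not Kallithea APIs:

def delete_user(delete, rewrite_authorized_keys, had_ssh_keys):
    try:
        delete()
    except ValueError as e:
        print('delete failed:', e)
    else:
        # Runs only when delete() returned normally, mirroring the controller:
        # the keys file is rewritten only after a successful deletion.
        if had_ssh_keys:
            rewrite_authorized_keys()

def ok():
    pass

def owns_repos():
    raise ValueError('user still owns repositories')

delete_user(ok, lambda: print('rewriting authorized_keys'), had_ssh_keys=True)
delete_user(owns_repos, lambda: print('not reached'), had_ssh_keys=True)
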
--- a/kallithea/controllers/api/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/api/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -35,12 +35,11 @@
 from tg import Response, TGController, request, response
 from webob.exc import HTTPError, HTTPException
 
+from kallithea.controllers import base
 from kallithea.lib import ext_json
 from kallithea.lib.auth import AuthUser
-from kallithea.lib.base import _get_ip_addr as _get_ip
-from kallithea.lib.base import get_path_info
 from kallithea.lib.utils2 import ascii_bytes
-from kallithea.model.db import User
+from kallithea.model import db
 
 
 log = logging.getLogger('JSONRPC')
@@ -83,9 +82,6 @@
 
      """
 
-    def _get_ip_addr(self, environ):
-        return _get_ip(environ)
-
     def _get_method_args(self):
         """
         Return `self._rpc_args` to dispatched controller method
@@ -103,7 +99,7 @@
 
         environ = state.request.environ
         start = time.time()
-        ip_addr = self._get_ip_addr(environ)
+        ip_addr = base.get_ip_addr(environ)
         self._req_id = None
         if 'CONTENT_LENGTH' not in environ:
             log.debug("No Content-Length")
@@ -145,7 +141,7 @@
 
         # check if we can find this session using api_key
         try:
-            u = User.get_by_api_key(self._req_api_key)
+            u = db.User.get_by_api_key(self._req_api_key)
             auth_user = AuthUser.make(dbuser=u, ip_addr=ip_addr)
             if auth_user is None:
                 raise JSONRPCErrorResponse(retid=self._req_id,
@@ -208,8 +204,8 @@
         self._rpc_args['environ'] = environ
 
         log.info('IP: %s Request to %s time: %.3fs' % (
-            self._get_ip_addr(environ),
-            get_path_info(environ), time.time() - start)
+            base.get_ip_addr(environ),
+            base.get_path_info(environ), time.time() - start)
         )
 
         state.set_action(self._rpc_call, [])
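
The JSON-RPC hunks above drop the thin _get_ip_addr wrapper in favour of calling base.get_ip_addr(environ) directly, while keeping the existing pattern of timing the dispatch with time.time() and logging it with a %.3fs format. A tiny self-contained sketch of that timing/logging pattern; the handler, addresses and paths are placeholders, not Kallithea code:

import logging
import time

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('JSONRPC')

def dispatch():
    time.sleep(0.01)  # stand-in for decoding the request and running the RPC method

start = time.time()
dispatch()
log.info('IP: %s Request to %s time: %.3fs' % ('127.0.0.1', '/_admin/api', time.time() - start))
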
--- a/kallithea/controllers/api/api.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/api/api.py	Thu May 27 21:27:37 2021 +0200
@@ -35,15 +35,13 @@
 from kallithea.lib.auth import (AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel,
                                 HasUserGroupPermissionLevel)
 from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException
-from kallithea.lib.utils import action_logger, repo2db_mapper
-from kallithea.lib.utils2 import OAttr, Optional
+from kallithea.lib.utils import repo2db_mapper
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import EmptyRepositoryError
+from kallithea.model import db, meta, userlog
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.comment import ChangesetCommentsModel
-from kallithea.model.db import ChangesetStatus, Gist, Permission, PullRequest, RepoGroup, Repository, Setting, User, UserGroup, UserIpMap
 from kallithea.model.gist import GistModel
-from kallithea.model.meta import Session
 from kallithea.model.pull_request import PullRequestModel
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
@@ -57,10 +55,10 @@
 
 def store_update(updates, attr, name):
     """
-    Stores param in updates dict if it's not instance of Optional
-    allows easy updates of passed in params
+    Stores the param in the updates dict if it is not None (i.e. if the user
+    explicitly set a parameter). This allows easy updates of passed-in params.
     """
-    if not isinstance(attr, Optional):
+    if attr is not None:
         updates[name] = attr
 
 
@@ -94,7 +92,7 @@
 
     :param repogroupid:
     """
-    repo_group = RepoGroup.guess_instance(repogroupid)
+    repo_group = db.RepoGroup.guess_instance(repogroupid)
     if repo_group is None:
         raise JSONRPCError(
             'repository group `%s` does not exist' % (repogroupid,))
@@ -119,7 +117,7 @@
 
     :param permid:
     """
-    perm = Permission.get_by_key(permid)
+    perm = db.Permission.get_by_key(permid)
     if perm is None:
         raise JSONRPCError('permission `%s` does not exist' % (permid,))
     if prefix:
@@ -161,7 +159,7 @@
         return args
 
     @HasPermissionAnyDecorator('hg.admin')
-    def pull(self, repoid, clone_uri=Optional(None)):
+    def pull(self, repoid, clone_uri=None):
         """
         Triggers a pull from remote location on given repo. Can be used to
         automatically keep remote repos up to date. This command can be executed
@@ -197,7 +195,7 @@
             ScmModel().pull_changes(repo.repo_name,
                                     request.authuser.username,
                                     request.ip_addr,
-                                    clone_uri=Optional.extract(clone_uri))
+                                    clone_uri=clone_uri)
             return dict(
                 msg='Pulled from `%s`' % repo.repo_name,
                 repository=repo.repo_name
@@ -209,7 +207,7 @@
             )
 
     @HasPermissionAnyDecorator('hg.admin')
-    def rescan_repos(self, remove_obsolete=Optional(False)):
+    def rescan_repos(self, remove_obsolete=False):
         """
         Triggers rescan repositories action. If remove_obsolete is set
         than also delete repos that are in database but not in the filesystem.
@@ -240,7 +238,7 @@
         """
 
         try:
-            rm_obsolete = Optional.extract(remove_obsolete)
+            rm_obsolete = remove_obsolete
             added, removed = repo2db_mapper(ScmModel().repo_scan(),
                                             remove_obsolete=rm_obsolete)
             return {'added': added, 'removed': removed}
@@ -295,7 +293,7 @@
             )
 
     @HasPermissionAnyDecorator('hg.admin')
-    def get_ip(self, userid=Optional(OAttr('apiuser'))):
+    def get_ip(self, userid=None):
         """
         Shows IP address as seen from Kallithea server, together with all
         defined IP addresses for given user. If userid is not passed data is
@@ -321,10 +319,10 @@
             }
 
         """
-        if isinstance(userid, Optional):
+        if userid is None:
             userid = request.authuser.user_id
         user = get_user_or_error(userid)
-        ips = UserIpMap.query().filter(UserIpMap.user == user).all()
+        ips = db.UserIpMap.query().filter(db.UserIpMap.user == user).all()
         return dict(
             server_ip_addr=request.ip_addr,
             user_ips=ips
@@ -350,9 +348,9 @@
           }
           error :  null
         """
-        return Setting.get_server_info()
+        return db.Setting.get_server_info()
 
-    def get_user(self, userid=Optional(OAttr('apiuser'))):
+    def get_user(self, userid=None):
         """
         Gets a user by username or user_id, Returns empty result if user is
         not found. If userid param is skipped it is set to id of user who is
@@ -397,12 +395,12 @@
         if not HasPermissionAny('hg.admin')():
             # make sure normal user does not pass someone else userid,
             # he is not allowed to do that
-            if not isinstance(userid, Optional) and userid != request.authuser.user_id:
+            if userid is not None and userid != request.authuser.user_id:
                 raise JSONRPCError(
                     'userid is not the same as your user'
                 )
 
-        if isinstance(userid, Optional):
+        if userid is None:
             userid = request.authuser.user_id
 
         user = get_user_or_error(userid)
@@ -426,17 +424,17 @@
 
         return [
             user.get_api_data()
-            for user in User.query()
-                .order_by(User.username)
+            for user in db.User.query()
+                .order_by(db.User.username)
                 .filter_by(is_default_user=False)
         ]
 
     @HasPermissionAnyDecorator('hg.admin')
-    def create_user(self, username, email, password=Optional(''),
-                    firstname=Optional(''), lastname=Optional(''),
-                    active=Optional(True), admin=Optional(False),
-                    extern_type=Optional(User.DEFAULT_AUTH_TYPE),
-                    extern_name=Optional('')):
+    def create_user(self, username, email, password='',
+                    firstname='', lastname='',
+                    active=True, admin=False,
+                    extern_type=db.User.DEFAULT_AUTH_TYPE,
+                    extern_name=''):
         """
         Creates new user. Returns new user object. This command can
         be executed only using api_key belonging to user with admin rights.
@@ -484,25 +482,25 @@
 
         """
 
-        if User.get_by_username(username):
+        if db.User.get_by_username(username):
             raise JSONRPCError("user `%s` already exist" % (username,))
 
-        if User.get_by_email(email):
+        if db.User.get_by_email(email):
             raise JSONRPCError("email `%s` already exist" % (email,))
 
         try:
             user = UserModel().create_or_update(
-                username=Optional.extract(username),
-                password=Optional.extract(password),
-                email=Optional.extract(email),
-                firstname=Optional.extract(firstname),
-                lastname=Optional.extract(lastname),
-                active=Optional.extract(active),
-                admin=Optional.extract(admin),
-                extern_type=Optional.extract(extern_type),
-                extern_name=Optional.extract(extern_name)
+                username=username,
+                password=password,
+                email=email,
+                firstname=firstname,
+                lastname=lastname,
+                active=active,
+                admin=admin,
+                extern_type=extern_type,
+                extern_name=extern_name
             )
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='created new user `%s`' % username,
                 user=user.get_api_data()
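
Across api.py the Optional/OAttr wrappers are replaced by plain None (or concrete) defaults: call sites now test owner is None instead of isinstance(owner, Optional), and store_update() only records parameters the caller actually supplied. A condensed sketch of that convention, with update_thing standing in for methods like update_user (it is not part of the Kallithea API):

def store_update(updates, attr, name):
    # Record the value only when the caller explicitly passed one.
    if attr is not None:
        updates[name] = attr

def update_thing(thing_id, name=None, description=None, active=None):
    updates = {}
    store_update(updates, name, 'name')
    store_update(updates, description, 'description')
    store_update(updates, active, 'active')
    return thing_id, updates

print(update_thing(1, name='demo'))                   # (1, {'name': 'demo'})
print(update_thing(2, description='', active=False))  # falsy but not None is kept

One consequence of using None as the sentinel is that callers can no longer distinguish "leave unchanged" from "explicitly set to null" for these parameters; the methods here treat None as "not supplied".
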
@@ -512,11 +510,11 @@
             raise JSONRPCError('failed to create user `%s`' % (username,))
 
     @HasPermissionAnyDecorator('hg.admin')
-    def update_user(self, userid, username=Optional(None),
-                    email=Optional(None), password=Optional(None),
-                    firstname=Optional(None), lastname=Optional(None),
-                    active=Optional(None), admin=Optional(None),
-                    extern_type=Optional(None), extern_name=Optional(None)):
+    def update_user(self, userid, username=None,
+                    email=None, password=None,
+                    firstname=None, lastname=None,
+                    active=None, admin=None,
+                    extern_type=None, extern_name=None):
         """
         updates given user if such user exists. This command can
         be executed only using api_key belonging to user with admin rights.
@@ -580,7 +578,7 @@
             store_update(updates, extern_type, 'extern_type')
 
             user = UserModel().update_user(user, **updates)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='updated user ID:%s %s' % (user.user_id, user.username),
                 user=user.get_api_data()
@@ -623,7 +621,7 @@
 
         try:
             UserModel().delete(userid)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='deleted user ID:%s %s' % (user.user_id, user.username),
                 user=None
@@ -682,12 +680,12 @@
 
         return [
             user_group.get_api_data()
-            for user_group in UserGroupList(UserGroup.query().all(), perm_level='read')
+            for user_group in UserGroupList(db.UserGroup.query().all(), perm_level='read')
         ]
 
     @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
-    def create_user_group(self, group_name, description=Optional(''),
-                          owner=Optional(OAttr('apiuser')), active=Optional(True)):
+    def create_user_group(self, group_name, description='',
+                          owner=None, active=True):
         """
         Creates new user group. This command can be executed only using api_key
         belonging to user with admin rights or an user who has create user group
@@ -727,15 +725,13 @@
             raise JSONRPCError("user group `%s` already exist" % (group_name,))
 
         try:
-            if isinstance(owner, Optional):
+            if owner is None:
                 owner = request.authuser.user_id
 
             owner = get_user_or_error(owner)
-            active = Optional.extract(active)
-            description = Optional.extract(description)
             ug = UserGroupModel().create(name=group_name, description=description,
                                          owner=owner, active=active)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='created new user group `%s`' % group_name,
                 user_group=ug.get_api_data()
@@ -745,9 +741,9 @@
             raise JSONRPCError('failed to create group `%s`' % (group_name,))
 
     # permission check inside
-    def update_user_group(self, usergroupid, group_name=Optional(''),
-                          description=Optional(''), owner=Optional(None),
-                          active=Optional(True)):
+    def update_user_group(self, usergroupid, group_name=None,
+                          description=None, owner=None,
+                          active=None):
         """
         Updates given usergroup.  This command can be executed only using api_key
         belonging to user with admin rights or an admin of given user group
@@ -786,7 +782,7 @@
             if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name):
                 raise JSONRPCError('user group `%s` does not exist' % (usergroupid,))
 
-        if not isinstance(owner, Optional):
+        if owner is not None:
             owner = get_user_or_error(owner)
 
         updates = {}
@@ -796,7 +792,7 @@
         store_update(updates, active, 'users_group_active')
         try:
             UserGroupModel().update(user_group, updates)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='updated user group ID:%s %s' % (user_group.users_group_id,
                                                      user_group.users_group_name),
@@ -842,7 +838,7 @@
 
         try:
             UserGroupModel().delete(user_group)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='deleted user group ID:%s %s' %
                     (user_group.users_group_id, user_group.users_group_name),
@@ -903,7 +899,7 @@
                 user.username, user_group.users_group_name
             )
             msg = msg if success else 'User is already in that group'
-            Session().commit()
+            meta.Session().commit()
 
             return dict(
                 success=success,
@@ -951,7 +947,7 @@
                 user.username, user_group.users_group_name
             )
             msg = msg if success else "User wasn't in group"
-            Session().commit()
+            meta.Session().commit()
             return dict(success=success, msg=msg)
         except Exception:
             log.error(traceback.format_exc())
@@ -963,8 +959,8 @@
 
     # permission check inside
     def get_repo(self, repoid,
-                 with_revision_names=Optional(False),
-                 with_pullrequests=Optional(False)):
+                 with_revision_names=False,
+                 with_pullrequests=False):
         """
         Gets an existing repository by it's name or repository_id. Members will return
         either users_group or user associated to that repository. This command can be
@@ -1064,8 +1060,8 @@
             for uf in repo.followers
         ]
 
-        data = repo.get_api_data(with_revision_names=Optional.extract(with_revision_names),
-                                 with_pullrequests=Optional.extract(with_pullrequests))
+        data = repo.get_api_data(with_revision_names=with_revision_names,
+                                 with_pullrequests=with_pullrequests)
         data['members'] = members
         data['followers'] = followers
         return data
@@ -1101,9 +1097,9 @@
             error:  null
         """
         if not HasPermissionAny('hg.admin')():
-            repos = RepoModel().get_all_user_repos(user=request.authuser.user_id)
+            repos = request.authuser.get_all_user_repos()
         else:
-            repos = Repository.query()
+            repos = db.Repository.query()
 
         return [
             repo.get_api_data()
@@ -1112,7 +1108,7 @@
 
     # permission check inside
     def get_repo_nodes(self, repoid, revision, root_path,
-                       ret_type=Optional('all')):
+                       ret_type='all'):
         """
         returns a list of nodes and it's children in a flat list for a given path
         at given revision. It's possible to specify ret_type to show only `files` or
@@ -1147,7 +1143,6 @@
             if not HasRepoPermissionLevel('read')(repo.repo_name):
                 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
 
-        ret_type = Optional.extract(ret_type)
         _map = {}
         try:
             _d, _f = ScmModel().get_nodes(repo, revision, root_path,
@@ -1168,13 +1163,13 @@
             )
 
     # permission check inside
-    def create_repo(self, repo_name, owner=Optional(OAttr('apiuser')),
-                    repo_type=Optional('hg'), description=Optional(''),
-                    private=Optional(False), clone_uri=Optional(None),
-                    landing_rev=Optional('rev:tip'),
-                    enable_statistics=Optional(False),
-                    enable_downloads=Optional(False),
-                    copy_permissions=Optional(False)):
+    def create_repo(self, repo_name, owner=None,
+                    repo_type=None, description='',
+                    private=False, clone_uri=None,
+                    landing_rev='rev:tip',
+                    enable_statistics=None,
+                    enable_downloads=None,
+                    copy_permissions=False):
         """
         Creates a repository. The repository name contains the full path, but the
         parent repository group must exist. For example "foo/bar/baz" require the groups
@@ -1228,7 +1223,7 @@
         repo_name_parts = repo_name.split('/')
         if len(repo_name_parts) > 1:
             group_name = '/'.join(repo_name_parts[:-1])
-            repo_group = RepoGroup.get_by_group_name(group_name)
+            repo_group = db.RepoGroup.get_by_group_name(group_name)
             if repo_group is None:
                 raise JSONRPCError("repo group `%s` not found" % group_name)
             if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(group_name)):
@@ -1238,12 +1233,12 @@
                 raise JSONRPCError("no permission to create top level repo")
 
         if not HasPermissionAny('hg.admin')():
-            if not isinstance(owner, Optional):
+            if owner is not None:
                 # forbid setting owner for non-admins
                 raise JSONRPCError(
                     'Only Kallithea admin can specify `owner` param'
                 )
-        if isinstance(owner, Optional):
+        if owner is None:
             owner = request.authuser.user_id
 
         owner = get_user_or_error(owner)
@@ -1251,28 +1246,22 @@
         if RepoModel().get_by_repo_name(repo_name):
             raise JSONRPCError("repo `%s` already exist" % repo_name)
 
-        defs = Setting.get_default_repo_settings(strip_prefix=True)
-        if isinstance(private, Optional):
-            private = defs.get('repo_private') or Optional.extract(private)
-        if isinstance(repo_type, Optional):
+        defs = db.Setting.get_default_repo_settings(strip_prefix=True)
+        if private is None:
+            private = defs.get('repo_private') or False
+        if repo_type is None:
             repo_type = defs.get('repo_type')
-        if isinstance(enable_statistics, Optional):
+        if enable_statistics is None:
             enable_statistics = defs.get('repo_enable_statistics')
-        if isinstance(enable_downloads, Optional):
+        if enable_downloads is None:
             enable_downloads = defs.get('repo_enable_downloads')
 
-        clone_uri = Optional.extract(clone_uri)
-        description = Optional.extract(description)
-        landing_rev = Optional.extract(landing_rev)
-        copy_permissions = Optional.extract(copy_permissions)
-
         try:
             data = dict(
                 repo_name=repo_name_parts[-1],
                 repo_name_full=repo_name,
                 repo_type=repo_type,
                 repo_description=description,
-                owner=owner,
                 repo_private=private,
                 clone_uri=clone_uri,
                 repo_group=group_name,
@@ -1282,14 +1271,12 @@
                 repo_copy_permissions=copy_permissions,
             )
 
-            task = RepoModel().create(form_data=data, cur_user=owner.username)
-            task_id = task.task_id
+            RepoModel().create(form_data=data, cur_user=owner.username)
             # no commit, it's done in RepoModel, or async via celery
             return dict(
                 msg="Created new repository `%s`" % (repo_name,),
                 success=True,  # cannot return the repo data here since fork
                                # can be done async
-                task=task_id
             )
         except Exception:
             log.error(traceback.format_exc())
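
create_repo above now falls back to the instance-wide defaults with expressions like defs.get('repo_private') or False. As a small reminder of how that idiom behaves: the or fallback kicks in for any falsy stored value (None, False, '', 0), not only when the key is missing. The defs dict below is made up for illustration:

defs = {'repo_private': False, 'repo_type': 'hg', 'repo_enable_downloads': True}

private = defs.get('repo_private') or False             # stored False -> False
repo_type = defs.get('repo_type') or 'hg'               # stored 'hg' -> 'hg'
downloads = defs.get('repo_enable_downloads') or False  # stored True -> True
statistics = defs.get('repo_enable_statistics')         # missing key -> None

print(private, repo_type, downloads, statistics)
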
@@ -1297,13 +1284,13 @@
                 'failed to create repository `%s`' % (repo_name,))
 
     # permission check inside
-    def update_repo(self, repoid, name=Optional(None),
-                    owner=Optional(OAttr('apiuser')),
-                    group=Optional(None),
-                    description=Optional(''), private=Optional(False),
-                    clone_uri=Optional(None), landing_rev=Optional('rev:tip'),
-                    enable_statistics=Optional(False),
-                    enable_downloads=Optional(False)):
+    def update_repo(self, repoid, name=None,
+                    owner=None,
+                    group=None,
+                    description=None, private=None,
+                    clone_uri=None, landing_rev=None,
+                    enable_statistics=None,
+                    enable_downloads=None):
 
         """
         Updates repo
@@ -1330,7 +1317,7 @@
             ):
                 raise JSONRPCError('no permission to create (or move) top level repositories')
 
-            if not isinstance(owner, Optional):
+            if owner is not None:
                 # forbid setting owner for non-admins
                 raise JSONRPCError(
                     'Only Kallithea admin can specify `owner` param'
@@ -1338,7 +1325,7 @@
 
         updates = {}
         repo_group = group
-        if not isinstance(repo_group, Optional):
+        if repo_group is not None:
             repo_group = get_repo_group_or_error(repo_group)  # TODO: repos can thus currently not be moved to root
             if repo_group.group_id != repo.group_id:
                 if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(repo_group.group_name)):
@@ -1356,7 +1343,7 @@
             store_update(updates, enable_downloads, 'repo_enable_downloads')
 
             RepoModel().update(repo, **updates)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
                 repository=repo.get_api_data()
@@ -1368,9 +1355,9 @@
     # permission check inside
     @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
     def fork_repo(self, repoid, fork_name,
-                  owner=Optional(OAttr('apiuser')),
-                  description=Optional(''), copy_permissions=Optional(False),
-                  private=Optional(False), landing_rev=Optional('rev:tip')):
+                  owner=None,
+                  description='', copy_permissions=False,
+                  private=False, landing_rev='rev:tip'):
         """
         Creates a fork of given repo. In case of using celery this will
         immediately return success message, while fork is going to be created
@@ -1424,7 +1411,7 @@
         fork_name_parts = fork_name.split('/')
         if len(fork_name_parts) > 1:
             group_name = '/'.join(fork_name_parts[:-1])
-            repo_group = RepoGroup.get_by_group_name(group_name)
+            repo_group = db.RepoGroup.get_by_group_name(group_name)
             if repo_group is None:
                 raise JSONRPCError("repo group `%s` not found" % group_name)
             if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(group_name)):
@@ -1436,7 +1423,7 @@
         if HasPermissionAny('hg.admin')():
             pass
         elif HasRepoPermissionLevel('read')(repo.repo_name):
-            if not isinstance(owner, Optional):
+            if owner is not None:
                 # forbid setting owner for non-admins
                 raise JSONRPCError(
                     'Only Kallithea admin can specify `owner` param'
@@ -1444,7 +1431,7 @@
         else:
             raise JSONRPCError('repository `%s` does not exist' % (repoid,))
 
-        if isinstance(owner, Optional):
+        if owner is None:
             owner = request.authuser.user_id
 
         owner = get_user_or_error(owner)
@@ -1455,22 +1442,20 @@
                 repo_name_full=fork_name,
                 repo_group=group_name,
                 repo_type=repo.repo_type,
-                description=Optional.extract(description),
-                private=Optional.extract(private),
-                copy_permissions=Optional.extract(copy_permissions),
-                landing_rev=Optional.extract(landing_rev),
+                description=description,
+                private=private,
+                copy_permissions=copy_permissions,
+                landing_rev=landing_rev,
                 update_after_clone=False,
                 fork_parent_id=repo.repo_id,
             )
-            task = RepoModel().create_fork(form_data, cur_user=owner.username)
+            RepoModel().create_fork(form_data, cur_user=owner.username)
             # no commit, it's done in RepoModel, or async via celery
-            task_id = task.task_id
             return dict(
                 msg='Created fork of `%s` as `%s`' % (repo.repo_name,
                                                       fork_name),
                 success=True,  # cannot return the repo data here since fork
                                # can be done async
-                task=task_id
             )
         except Exception:
             log.error(traceback.format_exc())
@@ -1480,7 +1465,7 @@
             )
 
     # permission check inside
-    def delete_repo(self, repoid, forks=Optional('')):
+    def delete_repo(self, repoid, forks=''):
         """
         Deletes a repository. This command can be executed only using api_key belonging
         to user with admin rights or regular user that have admin access to repository.
@@ -1509,7 +1494,7 @@
                 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
 
         try:
-            handle_forks = Optional.extract(forks)
+            handle_forks = forks
             _forks_msg = ''
             _forks = [f for f in repo.forks]
             if handle_forks == 'detach':
@@ -1523,7 +1508,7 @@
                 )
 
             RepoModel().delete(repo, forks=forks)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Deleted repository `%s`%s' % (repo.repo_name, _forks_msg),
                 success=True
@@ -1564,7 +1549,7 @@
 
             RepoModel().grant_user_permission(repo=repo, user=user, perm=perm)
 
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Granted perm: `%s` for user: `%s` in repo: `%s`' % (
                     perm.permission_name, user.username, repo.repo_name
@@ -1604,7 +1589,7 @@
         user = get_user_or_error(userid)
         try:
             RepoModel().revoke_user_permission(repo=repo, user=user)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Revoked perm for user: `%s` in repo: `%s`' % (
                     user.username, repo.repo_name
@@ -1666,7 +1651,7 @@
             RepoModel().grant_user_group_permission(
                 repo=repo, group_name=user_group, perm=perm)
 
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Granted perm: `%s` for user group: `%s` in '
                     'repo: `%s`' % (
@@ -1716,7 +1701,7 @@
             RepoModel().revoke_user_group_permission(
                 repo=repo, group_name=user_group)
 
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Revoked perm for user group: `%s` in repo: `%s`' % (
                     user_group.users_group_name, repo.repo_name
@@ -1776,14 +1761,14 @@
         """
         return [
             repo_group.get_api_data()
-            for repo_group in RepoGroup.query()
+            for repo_group in db.RepoGroup.query()
         ]
 
     @HasPermissionAnyDecorator('hg.admin')
-    def create_repo_group(self, group_name, description=Optional(''),
-                          owner=Optional(OAttr('apiuser')),
-                          parent=Optional(None),
-                          copy_permissions=Optional(False)):
+    def create_repo_group(self, group_name, description='',
+                          owner=None,
+                          parent=None,
+                          copy_permissions=False):
         """
         Creates a repository group. This command can be executed only using
         api_key belonging to user with admin rights.
@@ -1817,17 +1802,16 @@
           }
 
         """
-        if RepoGroup.get_by_group_name(group_name):
+        if db.RepoGroup.get_by_group_name(group_name):
             raise JSONRPCError("repo group `%s` already exist" % (group_name,))
 
-        if isinstance(owner, Optional):
+        if owner is None:
             owner = request.authuser.user_id
-        group_description = Optional.extract(description)
-        parent_group = Optional.extract(parent)
-        if not isinstance(parent, Optional):
-            parent_group = get_repo_group_or_error(parent_group)
+        group_description = description
+        parent_group = None
+        if parent is not None:
+            parent_group = get_repo_group_or_error(parent)
 
-        copy_permissions = Optional.extract(copy_permissions)
         try:
             repo_group = RepoGroupModel().create(
                 group_name=group_name,
@@ -1836,7 +1820,7 @@
                 parent=parent_group,
                 copy_permissions=copy_permissions
             )
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='created new repo group `%s`' % group_name,
                 repo_group=repo_group.get_api_data()
@@ -1847,10 +1831,10 @@
             raise JSONRPCError('failed to create repo group `%s`' % (group_name,))
 
     @HasPermissionAnyDecorator('hg.admin')
-    def update_repo_group(self, repogroupid, group_name=Optional(''),
-                          description=Optional(''),
-                          owner=Optional(OAttr('apiuser')),
-                          parent=Optional(None)):
+    def update_repo_group(self, repogroupid, group_name=None,
+                          description=None,
+                          owner=None,
+                          parent=None):
         repo_group = get_repo_group_or_error(repogroupid)
 
         updates = {}
@@ -1860,7 +1844,7 @@
             store_update(updates, owner, 'owner')
             store_update(updates, parent, 'parent_group')
             repo_group = RepoGroupModel().update(repo_group, updates)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='updated repository group ID:%s %s' % (repo_group.group_id,
                                                            repo_group.group_name),
@@ -1900,7 +1884,7 @@
 
         try:
             RepoGroupModel().delete(repo_group)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='deleted repo group ID:%s %s' %
                     (repo_group.group_id, repo_group.group_name),
@@ -1914,7 +1898,7 @@
 
     # permission check inside
     def grant_user_permission_to_repo_group(self, repogroupid, userid,
-                                            perm, apply_to_children=Optional('none')):
+                                            perm, apply_to_children='none'):
         """
         Grant permission for user on given repository group, or update existing
         one if found. This command can be executed only using api_key belonging
@@ -1956,7 +1940,6 @@
 
         user = get_user_or_error(userid)
         perm = get_perm_or_error(perm, prefix='group.')
-        apply_to_children = Optional.extract(apply_to_children)
 
         try:
             RepoGroupModel().add_permission(repo_group=repo_group,
@@ -1964,7 +1947,7 @@
                                             obj_type="user",
                                             perm=perm,
                                             recursive=apply_to_children)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % (
                     perm.permission_name, apply_to_children, user.username, repo_group.name
@@ -1979,7 +1962,7 @@
 
     # permission check inside
     def revoke_user_permission_from_repo_group(self, repogroupid, userid,
-                                               apply_to_children=Optional('none')):
+                                               apply_to_children='none'):
         """
         Revoke permission for user on given repository group. This command can
         be executed only using api_key belonging to user with admin rights, or
@@ -2018,7 +2001,6 @@
                 raise JSONRPCError('repository group `%s` does not exist' % (repogroupid,))
 
         user = get_user_or_error(userid)
-        apply_to_children = Optional.extract(apply_to_children)
 
         try:
             RepoGroupModel().delete_permission(repo_group=repo_group,
@@ -2026,7 +2008,7 @@
                                                obj_type="user",
                                                recursive=apply_to_children)
 
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % (
                     apply_to_children, user.username, repo_group.name
@@ -2042,7 +2024,7 @@
     # permission check inside
     def grant_user_group_permission_to_repo_group(
             self, repogroupid, usergroupid, perm,
-            apply_to_children=Optional('none')):
+            apply_to_children='none'):
         """
         Grant permission for user group on given repository group, or update
         existing one if found. This command can be executed only using
@@ -2089,15 +2071,13 @@
                 raise JSONRPCError(
                     'user group `%s` does not exist' % (usergroupid,))
 
-        apply_to_children = Optional.extract(apply_to_children)
-
         try:
             RepoGroupModel().add_permission(repo_group=repo_group,
                                             obj=user_group,
                                             obj_type="user_group",
                                             perm=perm,
                                             recursive=apply_to_children)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Granted perm: `%s` (recursive:%s) for user group: `%s` in repo group: `%s`' % (
                     perm.permission_name, apply_to_children,
@@ -2117,7 +2097,7 @@
     # permission check inside
     def revoke_user_group_permission_from_repo_group(
             self, repogroupid, usergroupid,
-            apply_to_children=Optional('none')):
+            apply_to_children='none'):
         """
         Revoke permission for user group on given repository. This command can be
         executed only using api_key belonging to user with admin rights, or
@@ -2159,14 +2139,12 @@
                 raise JSONRPCError(
                     'user group `%s` does not exist' % (usergroupid,))
 
-        apply_to_children = Optional.extract(apply_to_children)
-
         try:
             RepoGroupModel().delete_permission(repo_group=repo_group,
                                                obj=user_group,
                                                obj_type="user_group",
                                                recursive=apply_to_children)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % (
                     apply_to_children, user_group.users_group_name, repo_group.name
@@ -2194,7 +2172,7 @@
                 raise JSONRPCError('gist `%s` does not exist' % (gistid,))
         return gist.get_api_data()
 
-    def get_gists(self, userid=Optional(OAttr('apiuser'))):
+    def get_gists(self, userid=None):
         """
         Get all gists for given user. If userid is empty returned gists
         are for user who called the api
@@ -2205,27 +2183,27 @@
         if not HasPermissionAny('hg.admin')():
             # make sure normal user does not pass someone else userid,
             # he is not allowed to do that
-            if not isinstance(userid, Optional) and userid != request.authuser.user_id:
+            if userid is not None and userid != request.authuser.user_id:
                 raise JSONRPCError(
                     'userid is not the same as your user'
                 )
 
-        if isinstance(userid, Optional):
+        if userid is None:
             user_id = request.authuser.user_id
         else:
             user_id = get_user_or_error(userid).user_id
 
         return [
             gist.get_api_data()
-            for gist in Gist().query()
+            for gist in db.Gist().query()
                 .filter_by(is_expired=False)
-                .filter(Gist.owner_id == user_id)
-                .order_by(Gist.created_on.desc())
+                .filter(db.Gist.owner_id == user_id)
+                .order_by(db.Gist.created_on.desc())
         ]
 
-    def create_gist(self, files, owner=Optional(OAttr('apiuser')),
-                    gist_type=Optional(Gist.GIST_PUBLIC), lifetime=Optional(-1),
-                    description=Optional('')):
+    def create_gist(self, files, owner=None,
+                    gist_type=db.Gist.GIST_PUBLIC, lifetime=-1,
+                    description=''):
 
         """
         Creates new Gist
@@ -2262,13 +2240,10 @@
 
         """
         try:
-            if isinstance(owner, Optional):
+            if owner is None:
                 owner = request.authuser.user_id
 
             owner = get_user_or_error(owner)
-            description = Optional.extract(description)
-            gist_type = Optional.extract(gist_type)
-            lifetime = Optional.extract(lifetime)
 
             gist = GistModel().create(description=description,
                                       owner=owner,
@@ -2276,7 +2251,7 @@
                                       gist_mapping=files,
                                       gist_type=gist_type,
                                       lifetime=lifetime)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='created new gist',
                 gist=gist.get_api_data()
@@ -2285,12 +2260,6 @@
             log.error(traceback.format_exc())
             raise JSONRPCError('failed to create gist')
 
-    # def update_gist(self, gistid, files, owner=Optional(OAttr('apiuser')),
-    #                 gist_type=Optional(Gist.GIST_PUBLIC),
-    #                 gist_lifetime=Optional(-1), gist_description=Optional('')):
-    #     gist = get_gist_or_error(gistid)
-    #     updates = {}
-
     # permission check inside
     def delete_gist(self, gistid):
         """
@@ -2324,7 +2293,7 @@
 
         try:
             GistModel().delete(gist)
-            Session().commit()
+            meta.Session().commit()
             return dict(
                 msg='deleted gist ID:%s' % (gist.gist_access_id,),
                 gist=None
@@ -2354,7 +2323,7 @@
             raise JSONRPCError('Repository is empty')
 
     # permission check inside
-    def get_changeset(self, repoid, raw_id, with_reviews=Optional(False)):
+    def get_changeset(self, repoid, raw_id, with_reviews=False):
         repo = get_repo_or_error(repoid)
         if not HasRepoPermissionLevel('read')(repo.repo_name):
             raise JSONRPCError('Access denied to repo %s' % repo.repo_name)
@@ -2364,7 +2333,6 @@
 
         info = dict(changeset.as_dict())
 
-        with_reviews = Optional.extract(with_reviews)
         if with_reviews:
             reviews = ChangesetStatusModel().get_statuses(
                                 repo.repo_name, raw_id)
@@ -2377,7 +2345,7 @@
         """
         Get given pull request by id
         """
-        pull_request = PullRequest.get(pullrequest_id)
+        pull_request = db.PullRequest.get(pullrequest_id)
         if pull_request is None:
             raise JSONRPCError('pull request `%s` does not exist' % (pullrequest_id,))
         if not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name):
@@ -2390,7 +2358,7 @@
         Add comment, close and change status of pull request.
         """
         apiuser = get_user_or_error(request.authuser.user_id)
-        pull_request = PullRequest.get(pull_request_id)
+        pull_request = db.PullRequest.get(pull_request_id)
         if pull_request is None:
             raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,))
         if (not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name)):
@@ -2412,10 +2380,10 @@
             pull_request=pull_request.pull_request_id,
             f_path=None,
             line_no=None,
-            status_change=ChangesetStatus.get_status_lbl(status),
+            status_change=db.ChangesetStatus.get_status_lbl(status),
             closing_pr=close_pr
         )
-        action_logger(apiuser,
+        userlog.action_logger(apiuser,
                       'user_commented_pull_request:%s' % pull_request_id,
                       pull_request.org_repo, request.ip_addr)
         if status:
@@ -2428,8 +2396,54 @@
             )
         if close_pr:
             PullRequestModel().close_pull_request(pull_request_id)
-            action_logger(apiuser,
+            userlog.action_logger(apiuser,
                           'user_closed_pull_request:%s' % pull_request_id,
                           pull_request.org_repo, request.ip_addr)
-        Session().commit()
+        meta.Session().commit()
         return True
+
+    # permission check inside
+    def edit_reviewers(self, pull_request_id, add=None, remove=None):
+        """
+        Add and/or remove one or more reviewers on a pull request, by username
+        or user ID. Reviewers are specified either as a single string or
+        as a JSON list of one or more strings.
+        """
+        if add is None and remove is None:
+            raise JSONRPCError('''Invalid request. Neither 'add' nor 'remove' is specified.''')
+
+        pull_request = db.PullRequest.get(pull_request_id)
+        if pull_request is None:
+            raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,))
+
+        apiuser = get_user_or_error(request.authuser.user_id)
+        is_owner = apiuser.user_id == pull_request.owner_id
+        is_repo_admin = HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name)
+        if not (apiuser.admin or is_repo_admin or is_owner):
+            raise JSONRPCError('No permission to edit reviewers of this pull request. User needs to be admin or pull request owner.')
+        if pull_request.is_closed():
+            raise JSONRPCError('Cannot edit reviewers of a closed pull request.')
+
+        if not isinstance(add, list):
+            add = [add]
+        if not isinstance(remove, list):
+            remove = [remove]
+
+        # look up actual user objects from given name or id. Bail out if unknown.
+        add_objs = set(get_user_or_error(user) for user in add if user is not None)
+        remove_objs = set(get_user_or_error(user) for user in remove if user is not None)
+
+        new_reviewers = redundant_reviewers = set()
+        if add_objs:
+            new_reviewers, redundant_reviewers = PullRequestModel().add_reviewers(apiuser, pull_request, add_objs)
+        if remove_objs:
+            PullRequestModel().remove_reviewers(apiuser, pull_request, remove_objs)
+
+        meta.Session().commit()
+
+        return {
+            'added': [x.username for x in new_reviewers],
+            'already_present': [x.username for x in redundant_reviewers],
+            # NOTE: no explicit check that removed reviewers were actually present.
+            'removed': [x.username for x in remove_objs],
+        }
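For reference, a minimal client-side sketch of calling the new edit_reviewers method through the JSON-RPC API; the endpoint URL and api_key are placeholders, and the request envelope (id/api_key/method/args) follows the usual Kallithea API call format:

import json
import urllib.request

payload = {
    'id': 1,
    'api_key': '<api_key>',                    # placeholder
    'method': 'edit_reviewers',
    'args': {
        'pull_request_id': 7,                  # example id
        'add': ['reviewer1', 'reviewer2'],     # username(s) or user id(s)
        'remove': 'old_reviewer',              # a single string is accepted too
    },
}
req = urllib.request.Request(
    'https://kallithea.example.com/_admin/api',    # placeholder API URL
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
)
# The JSON-RPC response's 'result' field carries the added/already_present/removed lists.
print(json.load(urllib.request.urlopen(req)))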
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/controllers/base.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,633 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+kallithea.controllers.base
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The base Controller API
+Provides the BaseController class, for subclassing and use in the various
+controllers.
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Oct 06, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+import base64
+import datetime
+import logging
+import traceback
+import warnings
+
+import decorator
+import paste.auth.basic
+import paste.httpexceptions
+import paste.httpheaders
+import webob.exc
+from tg import TGController, config, render_template, request, response, session
+from tg import tmpl_context as c
+from tg.i18n import ugettext as _
+
+import kallithea
+from kallithea.lib import auth_modules, ext_json, webutils
+from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
+from kallithea.lib.exceptions import UserCreationError
+from kallithea.lib.utils import get_repo_slug, is_valid_repo
+from kallithea.lib.utils2 import AttributeDict, asbool, ascii_bytes, safe_int, safe_str, set_hook_environment
+from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
+from kallithea.model.scm import ScmModel
+
+
+log = logging.getLogger(__name__)
+
+
+def render(template_path):
+    return render_template({'url': url}, 'mako', template_path)
+
+
+def _filter_proxy(ip):
+    """
+    HTTP_X_FORWARDED_FOR headers can have multiple IP addresses, with the
+    leftmost being the original client. Each proxy that is forwarding the
+    request will usually add the IP address it sees the request coming from.
+
+    The client might have provided a fake leftmost value before hitting the
+    first proxy, so if we have a proxy that is adding one IP address, we can
+    only trust the rightmost address.
+    """
+    if ',' in ip:
+        _ips = ip.split(',')
+        _first_ip = _ips[-1].strip()
+        log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
+        return _first_ip
+    return ip
+
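A standalone illustration of the header rule described in _filter_proxy above; the helper below just mirrors that logic for demonstration and is not part of the module:

def rightmost_forwarded_ip(header_value):
    # Only the rightmost, proxy-appended X-Forwarded-For entry can be trusted.
    return header_value.split(',')[-1].strip()

assert rightmost_forwarded_ip('203.0.113.7, 10.0.0.2') == '10.0.0.2'
assert rightmost_forwarded_ip('198.51.100.4') == '198.51.100.4'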
+
+def get_ip_addr(environ):
+    """The web server will set REMOTE_ADDR to the unfakeable IP layer client IP address.
+    If using a proxy server, make it possible to use another value, such as
+    the X-Forwarded-For header, by setting `remote_addr_variable = HTTP_X_FORWARDED_FOR`.
+    """
+    remote_addr_variable = kallithea.CONFIG.get('remote_addr_variable', 'REMOTE_ADDR')
+    return _filter_proxy(environ.get(remote_addr_variable, '0.0.0.0'))
+
+
+def get_path_info(environ):
+    """Return PATH_INFO from environ ... using tg.original_request if available.
+
+    In Python 3 WSGI, PATH_INFO is a unicode str that actually carries encoded
+    bytes: the code points are guaranteed to only use the lower 8 bits, and
+    encoding the string with the 1:1 encoding latin1 will give the
+    corresponding byte string ... which then can be decoded to proper unicode.
+    """
+    org_req = environ.get('tg.original_request')
+    if org_req is not None:
+        environ = org_req.environ
+    return safe_str(environ['PATH_INFO'].encode('latin1'))
+
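A standalone illustration of the latin1 round-trip described above, using plain str.encode/bytes.decode instead of Kallithea's safe_str:

wsgi_path_info = '/repo/caf\xc3\xa9'         # UTF-8 bytes exposed as latin1 code points by the WSGI server
raw_bytes = wsgi_path_info.encode('latin1')  # 1:1 mapping back to the original byte string
assert raw_bytes == b'/repo/caf\xc3\xa9'
assert raw_bytes.decode('utf-8') == '/repo/café'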
+
+def log_in_user(user, remember, is_external_auth, ip_addr):
+    """
+    Log a `User` in and update session and cookies. If `remember` is True,
+    the session cookie is set to expire in a year; otherwise, it expires at
+    the end of the browser session.
+
+    Returns populated `AuthUser` object.
+    """
+    # It should not be possible to explicitly log in as the default user.
+    assert not user.is_default_user, user
+
+    auth_user = AuthUser.make(dbuser=user, is_external_auth=is_external_auth, ip_addr=ip_addr)
+    if auth_user is None:
+        return None
+
+    user.update_lastlogin()
+    meta.Session().commit()
+
+    # Start new session to prevent session fixation attacks.
+    session.invalidate()
+    session['authuser'] = cookie = auth_user.to_cookie()
+
+    # If they want to be remembered, update the cookie.
+    # NOTE: Assumes that beaker defaults to browser session cookie.
+    if remember:
+        t = datetime.datetime.now() + datetime.timedelta(days=365)
+        session._set_cookie_expires(t)
+
+    session.save()
+
+    log.info('user %s is now authenticated and stored in '
+             'session, session attrs %s', user.username, cookie)
+
+    # dumps session attrs back to cookie
+    session._update_cookie_out()
+
+    return auth_user
+
+
+class BasicAuth(paste.auth.basic.AuthBasicAuthenticator):
+
+    def __init__(self, realm, authfunc, auth_http_code=None):
+        self.realm = realm
+        self.authfunc = authfunc
+        self._rc_auth_http_code = auth_http_code
+
+    def build_authentication(self, environ):
+        head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
+        # Consume the whole body before sending a response
+        try:
+            request_body_size = int(environ.get('CONTENT_LENGTH', 0))
+        except ValueError:
+            request_body_size = 0
+        environ['wsgi.input'].read(request_body_size)
+        if self._rc_auth_http_code and self._rc_auth_http_code == '403':
+            # return 403 if alternative http return code is specified in
+            # Kallithea config
+            return paste.httpexceptions.HTTPForbidden(headers=head)
+        return paste.httpexceptions.HTTPUnauthorized(headers=head)
+
+    def authenticate(self, environ):
+        authorization = paste.httpheaders.AUTHORIZATION(environ)
+        if not authorization:
+            return self.build_authentication(environ)
+        (authmeth, auth) = authorization.split(' ', 1)
+        if 'basic' != authmeth.lower():
+            return self.build_authentication(environ)
+        auth = safe_str(base64.b64decode(auth.strip()))
+        _parts = auth.split(':', 1)
+        if len(_parts) == 2:
+            username, password = _parts
+            if self.authfunc(username, password, environ) is not None:
+                return username
+        return self.build_authentication(environ)
+
+    __call__ = authenticate
+
+
+class BaseVCSController(object):
+    """Base controller for handling Mercurial/Git protocol requests
+    (coming from a VCS client, and not a browser).
+    """
+
+    scm_alias = None # 'hg' / 'git'
+
+    def __init__(self, application, config):
+        self.application = application
+        self.config = config
+        # base path of repo locations
+        self.basepath = self.config['base_path']
+        # authenticate this VCS request using the authentication modules
+        self.authenticate = BasicAuth('', auth_modules.authenticate,
+                                      config.get('auth_ret_code'))
+
+    @classmethod
+    def parse_request(cls, environ):
+        """If request is parsed as a request for this VCS, return a namespace with the parsed request.
+        If the request is unknown, return None.
+        """
+        raise NotImplementedError()
+
+    def _authorize(self, environ, action, repo_name, ip_addr):
+        """Authenticate and authorize user.
+
+        Since we're dealing with a VCS client and not a browser, we only
+        support HTTP basic authentication, either directly via raw header
+        inspection, or by using container authentication to delegate the
+        authentication to the web server.
+
+        Returns (user, None) on successful authentication and authorization.
+        Returns (None, wsgi_app) to send the wsgi_app response to the client.
+        """
+        # Use anonymous access if allowed for action on repo.
+        default_user = db.User.get_default_user()
+        default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
+        if default_authuser is None:
+            log.debug('No anonymous access at all') # move on to proper user auth
+        else:
+            if self._check_permission(action, default_authuser, repo_name):
+                return default_authuser, None
+            log.debug('Not authorized to access this repository as anonymous user')
+
+        username = None
+        #==============================================================
+        # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
+        # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
+        #==============================================================
+
+        # try to auth based on environ, container auth methods
+        log.debug('Running PRE-AUTH for container based authentication')
+        pre_auth = auth_modules.authenticate('', '', environ)
+        if pre_auth is not None and pre_auth.get('username'):
+            username = pre_auth['username']
+        log.debug('PRE-AUTH got %s as username', username)
+
+        # If not authenticated by the container, run basic auth
+        if not username:
+            self.authenticate.realm = self.config['realm']
+            result = self.authenticate(environ)
+            if isinstance(result, str):
+                paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
+                paste.httpheaders.REMOTE_USER.update(environ, result)
+                username = result
+            else:
+                return None, result.wsgi_application
+
+        #==============================================================
+        # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
+        #==============================================================
+        try:
+            user = db.User.get_by_username_or_email(username)
+        except Exception:
+            log.error(traceback.format_exc())
+            return None, webob.exc.HTTPInternalServerError()
+
+        authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr)
+        if authuser is None:
+            return None, webob.exc.HTTPForbidden()
+        if not self._check_permission(action, authuser, repo_name):
+            return None, webob.exc.HTTPForbidden()
+
+        return user, None
+
+    def _handle_request(self, environ, start_response):
+        raise NotImplementedError()
+
+    def _check_permission(self, action, authuser, repo_name):
+        """
+        :param action: 'push' or 'pull'
+        :param user: `AuthUser` instance
+        :param repo_name: repository name
+        """
+        if action == 'push':
+            if not HasPermissionAnyMiddleware('repository.write',
+                                              'repository.admin')(authuser,
+                                                                  repo_name):
+                return False
+
+        elif action == 'pull':
+            # any other action needs at least read permission
+            if not HasPermissionAnyMiddleware('repository.read',
+                                              'repository.write',
+                                              'repository.admin')(authuser,
+                                                                  repo_name):
+                return False
+
+        else:
+            assert False, action
+
+        return True
+
+    def __call__(self, environ, start_response):
+        try:
+            # try parsing a request for this VCS - if it fails, call the wrapped app
+            parsed_request = self.parse_request(environ)
+            if parsed_request is None:
+                return self.application(environ, start_response)
+
+            # skip passing error to error controller
+            environ['pylons.status_code_redirect'] = True
+
+            # quick check if repo exists...
+            if not is_valid_repo(parsed_request.repo_name, self.basepath, self.scm_alias):
+                raise webob.exc.HTTPNotFound()
+
+            if parsed_request.action is None:
+                # Note: the client doesn't get the helpful error message
+                raise webob.exc.HTTPBadRequest('Unable to detect pull/push action for %r! Are you using a nonstandard command or client?' % parsed_request.repo_name)
+
+            #======================================================================
+            # CHECK PERMISSIONS
+            #======================================================================
+            ip_addr = get_ip_addr(environ)
+            user, response_app = self._authorize(environ, parsed_request.action, parsed_request.repo_name, ip_addr)
+            if response_app is not None:
+                return response_app(environ, start_response)
+
+            #======================================================================
+            # REQUEST HANDLING
+            #======================================================================
+            set_hook_environment(user.username, ip_addr,
+                parsed_request.repo_name, self.scm_alias, parsed_request.action)
+
+            try:
+                log.info('%s action on %s repo "%s" by "%s" from %s',
+                         parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
+                app = self._make_app(parsed_request)
+                return app(environ, start_response)
+            except Exception:
+                log.error(traceback.format_exc())
+                raise webob.exc.HTTPInternalServerError()
+
+        except webob.exc.HTTPException as e:
+            return e(environ, start_response)
+
+
+class BaseController(TGController):
+
+    def _before(self, *args, **kwargs):
+        """
+        _before is called before controller methods and after __call__
+        """
+        if request.needs_csrf_check:
+            # CSRF protection: Whenever a request has ambient authority (whether
+            # through a session cookie or its origin IP address), it must include
+            # the correct token, unless the HTTP method is GET or HEAD (and thus
+            # guaranteed to be side effect free). In practice, the only situation
+            # where we allow side effects without ambient authority is when the
+            # authority comes from an API key; and that is handled above.
+            token = request.POST.get(webutils.session_csrf_secret_name)
+            if not token or token != webutils.session_csrf_secret_token():
+                log.error('CSRF check failed')
+                raise webob.exc.HTTPForbidden()
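+            # Illustration (not part of this change): forms rendered by Kallithea
+            # templates pass this check by including a hidden field whose name is
+            # webutils.session_csrf_secret_name and whose value is the token
+            # returned by webutils.session_csrf_secret_token() for the session.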
+
+        c.kallithea_version = kallithea.__version__
+        settings = db.Setting.get_app_settings()
+
+        # Visual options
+        c.visual = AttributeDict({})
+
+        ## DB stored
+        c.visual.show_public_icon = asbool(settings.get('show_public_icon'))
+        c.visual.show_private_icon = asbool(settings.get('show_private_icon'))
+        c.visual.stylify_metalabels = asbool(settings.get('stylify_metalabels'))
+        c.visual.page_size = safe_int(settings.get('dashboard_items', 100))
+        c.visual.admin_grid_items = safe_int(settings.get('admin_grid_items', 100))
+        c.visual.repository_fields = asbool(settings.get('repository_fields'))
+        c.visual.show_version = asbool(settings.get('show_version'))
+        c.visual.use_gravatar = asbool(settings.get('use_gravatar'))
+        c.visual.gravatar_url = settings.get('gravatar_url')
+
+        c.ga_code = settings.get('ga_code')
+        # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
+        if c.ga_code and '<' not in c.ga_code:
+            c.ga_code = '''<script type="text/javascript">
+                var _gaq = _gaq || [];
+                _gaq.push(['_setAccount', '%s']);
+                _gaq.push(['_trackPageview']);
+
+                (function() {
+                    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
+                    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
+                    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
+                    })();
+            </script>''' % c.ga_code
+        c.site_name = settings.get('title')
+        c.clone_uri_tmpl = settings.get('clone_uri_tmpl') or db.Repository.DEFAULT_CLONE_URI
+        c.clone_ssh_tmpl = settings.get('clone_ssh_tmpl') or db.Repository.DEFAULT_CLONE_SSH
+
+        ## INI stored
+        c.visual.allow_repo_location_change = asbool(config.get('allow_repo_location_change', True))
+        c.visual.allow_custom_hooks_settings = asbool(config.get('allow_custom_hooks_settings', True))
+        c.ssh_enabled = asbool(config.get('ssh_enabled', False))
+
+        c.instance_id = config.get('instance_id')
+        c.issues_url = config.get('bugtracker', url('issues_url'))
+        # END CONFIG VARS
+
+        c.repo_name = get_repo_slug(request)  # can be empty
+        c.backends = list(kallithea.BACKENDS)
+
+        self.cut_off_limit = safe_int(config.get('cut_off_limit'))
+
+        c.my_pr_count = db.PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()
+
+        self.scm_model = ScmModel()
+
+    @staticmethod
+    def _determine_auth_user(session_authuser, ip_addr):
+        """
+        Create an `AuthUser` object from the value of the authuser session cookie,
+        falling back to container authentication or the default (anonymous) user.
+        Returns None if no valid user is found (e.g. not active or no access for the IP).
+        """
+
+        # Authenticate by session cookie
+        # In ancient login sessions, 'authuser' may not be a dict.
+        # In that case, the user will have to log in again.
+        # v0.3 and earlier included an 'is_authenticated' key; if present,
+        # this must be True.
+        if isinstance(session_authuser, dict) and session_authuser.get('is_authenticated', True):
+            return AuthUser.from_cookie(session_authuser, ip_addr=ip_addr)
+
+        # Authenticate by auth_container plugin (if enabled)
+        if any(
+            plugin.is_container_auth
+            for plugin in auth_modules.get_auth_plugins()
+        ):
+            try:
+                user_info = auth_modules.authenticate('', '', request.environ)
+            except UserCreationError as e:
+                webutils.flash(e, 'error', logf=log.error)
+            else:
+                if user_info is not None:
+                    username = user_info['username']
+                    user = db.User.get_by_username(username, case_insensitive=True)
+                    return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)
+
+        # User is default user (if active) or anonymous
+        default_user = db.User.get_default_user()
+        authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
+        if authuser is None: # fall back to anonymous
+            authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
+        return authuser
+
+    @staticmethod
+    def _basic_security_checks():
+        """Perform basic security/sanity checks before processing the request."""
+
+        # Only allow the following HTTP request methods.
+        if request.method not in ['GET', 'HEAD', 'POST']:
+            raise webob.exc.HTTPMethodNotAllowed()
+
+        # Also verify the _method override - no longer allowed.
+        if request.params.get('_method') is None:
+            pass # no override, no problem
+        else:
+            raise webob.exc.HTTPMethodNotAllowed()
+
+        # Make sure CSRF token never appears in the URL. If so, invalidate it.
+        if webutils.session_csrf_secret_name in request.GET:
+            log.error('CSRF key leak detected')
+            session.pop(webutils.session_csrf_secret_name, None)
+            session.save()
+            webutils.flash(_('CSRF token leak has been detected - all form tokens have been expired'),
+                    category='error')
+
+        # WebOb already ignores request payload parameters for anything other
+        # than POST/PUT, but double-check since other Kallithea code relies on
+        # this assumption.
+        if request.method not in ['POST', 'PUT'] and request.POST:
+            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
+            raise webob.exc.HTTPBadRequest()
+
+    def __call__(self, environ, context):
+        try:
+            ip_addr = get_ip_addr(environ)
+            self._basic_security_checks()
+
+            api_key = request.GET.get('api_key')
+            try:
+                # Request.authorization may raise ValueError on invalid input
+                type, params = request.authorization
+            except (ValueError, TypeError):
+                pass
+            else:
+                if type.lower() == 'bearer':
+                    api_key = params # bearer token is an api key too
+
+            if api_key is None:
+                authuser = self._determine_auth_user(
+                    session.get('authuser'),
+                    ip_addr=ip_addr,
+                )
+                needs_csrf_check = request.method not in ['GET', 'HEAD']
+
+            else:
+                dbuser = db.User.get_by_api_key(api_key)
+                if dbuser is None:
+                    log.info('No db user found for authentication with API key ****%s from %s',
+                             api_key[-4:], ip_addr)
+                authuser = AuthUser.make(dbuser=dbuser, is_external_auth=True, ip_addr=ip_addr)
+                needs_csrf_check = False # API key provides CSRF protection
+
+            if authuser is None:
+                log.info('No valid user found')
+                raise webob.exc.HTTPForbidden()
+
+            # set globals for auth user
+            request.authuser = authuser
+            request.ip_addr = ip_addr
+            request.needs_csrf_check = needs_csrf_check
+
+            log.info('IP: %s User: %s Request: %s',
+                request.ip_addr, request.authuser,
+                get_path_info(environ),
+            )
+            return super(BaseController, self).__call__(environ, context)
+        except webob.exc.HTTPException as e:
+            return e
+
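As the dispatch above shows, a bearer token is treated as an ordinary API key. A minimal client-side sketch of such a request (URL and token are placeholders):

import urllib.request

req = urllib.request.Request(
    'https://kallithea.example.com/some/page',       # placeholder URL
    headers={'Authorization': 'Bearer <api_key>'},   # placeholder token; same effect as ?api_key=<api_key>
)
# urllib.request.urlopen(req) would then be processed as that key's user, with CSRF checks skipped.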
+
+class BaseRepoController(BaseController):
+    """
+    Base class for controllers responsible for loading all data needed for
+    a repository. Loaded items are:
+
+    c.db_repo_scm_instance: instance of the scm repository
+    c.db_repo: the db.Repository instance
+    c.repository_followers: number of followers
+    c.repository_forks: number of forks
+    c.repository_following: whether the current user is following the current repo
+    """
+
+    def _before(self, *args, **kwargs):
+        super(BaseRepoController, self)._before(*args, **kwargs)
+        if c.repo_name:  # extracted from the request by BaseController._before
+            _dbr = db.Repository.get_by_repo_name(c.repo_name)
+            if not _dbr:
+                return
+
+            log.debug('Found repository in database %s with state `%s`',
+                      _dbr, _dbr.repo_state)
+            route = getattr(request.environ.get('routes.route'), 'name', '')
+
+            # allow deleting repos that are somehow damaged in the filesystem
+            if route in ['delete_repo']:
+                return
+
+            if _dbr.repo_state in [db.Repository.STATE_PENDING]:
+                if route in ['repo_creating_home']:
+                    return
+                check_url = url('repo_creating_home', repo_name=c.repo_name)
+                raise webob.exc.HTTPFound(location=check_url)
+
+            dbr = c.db_repo = _dbr
+            c.db_repo_scm_instance = c.db_repo.scm_instance
+            if c.db_repo_scm_instance is None:
+                log.error('%s this repository is present in database but it '
+                          'cannot be created as an scm instance', c.repo_name)
+                webutils.flash(_('Repository not found in the filesystem'),
+                        category='error')
+                raise webob.exc.HTTPNotFound()
+
+            # some global counters for the menu
+            c.repository_followers = self.scm_model.get_followers(dbr)
+            c.repository_forks = self.scm_model.get_forks(dbr)
+            c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
+            c.repository_following = self.scm_model.is_following_repo(
+                                    c.repo_name, request.authuser.user_id)
+
+    @staticmethod
+    def _get_ref_rev(repo, ref_type, ref_name, returnempty=False):
+        """
+        Safe way to get a changeset. If an error occurs, show an error message to the user.
+        """
+        try:
+            return repo.scm_instance.get_ref_revision(ref_type, ref_name)
+        except EmptyRepositoryError as e:
+            if returnempty:
+                return repo.scm_instance.EMPTY_CHANGESET
+            webutils.flash(_('There are no changesets yet'), category='error')
+            raise webob.exc.HTTPNotFound()
+        except ChangesetDoesNotExistError as e:
+            webutils.flash(_('Changeset for %s %s not found in %s') %
+                              (ref_type, ref_name, repo.repo_name),
+                    category='error')
+            raise webob.exc.HTTPNotFound()
+        except RepositoryError as e:
+            log.error(traceback.format_exc())
+            webutils.flash(e, category='error')
+            raise webob.exc.HTTPBadRequest()
+
+
+@decorator.decorator
+def jsonify(func, *args, **kwargs):
+    """Action decorator that formats output for JSON
+
+    Given a function that will return content, this decorator will turn
+    the result into JSON, with a content-type of 'application/json' and
+    output it.
+    """
+    response.headers['Content-Type'] = 'application/json; charset=utf-8'
+    data = func(*args, **kwargs)
+    if isinstance(data, (list, tuple)):
+        # A JSON list response is syntactically valid JavaScript and can be
+        # loaded and executed as JavaScript by a malicious third-party site
+        # using <script>, which can lead to cross-site data leaks.
+        # JSON responses should therefore be scalars or objects (i.e. Python
+        # dicts), because a JSON object is a syntax error if interpreted as JS.
+        msg = "JSON responses with Array envelopes are susceptible to " \
+              "cross-site data leak attacks, see " \
+              "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
+        warnings.warn(msg, Warning, 2)
+        log.warning(msg)
+    log.debug("Returning JSON wrapped action output")
+    return ascii_bytes(ext_json.dumps(data))
+
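A small illustration of the envelope rule that the warning above enforces (illustrative only, not part of the module):

import json

rows = [{'id': 1}, {'id': 2}]
unsafe = json.dumps(rows)                 # bare JSON array: also valid JavaScript, loadable via <script>
safe = json.dumps({'results': rows})      # object envelope: a syntax error when interpreted as a script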
+@decorator.decorator
+def IfSshEnabled(func, *args, **kwargs):
+    """Decorator for functions that can only be called if SSH access is enabled.
+
+    If SSH access is disabled in the configuration file, HTTPNotFound is raised.
+    """
+    if not c.ssh_enabled:
+        webutils.flash(_("SSH access is disabled."), category='warning')
+        raise webob.exc.HTTPNotFound()
+    return func(*args, **kwargs)
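A hypothetical usage sketch of the IfSshEnabled decorator defined above; the controller, action, and template names are made up for illustration:

from kallithea.controllers import base

class SshKeysController(base.BaseController):

    @base.IfSshEnabled
    def index(self):
        # Only reachable when ssh_enabled is true in the ini file;
        # otherwise the decorator flashes a warning and raises HTTPNotFound.
        return base.render('admin/my_account/ssh_keys.html')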
--- a/kallithea/controllers/changelog.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/changelog.py	Thu May 27 21:27:37 2021 +0200
@@ -33,20 +33,20 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound
 
-import kallithea.lib.helpers as h
-from kallithea.config.routing import url
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.graphmod import graph_data
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, NodeDoesNotExistError, RepositoryError
+from kallithea.lib.webutils import url
 
 
 log = logging.getLogger(__name__)
 
 
-class ChangelogController(BaseRepoController):
+class ChangelogController(base.BaseRepoController):
 
     def _before(self, *args, **kwargs):
         super(ChangelogController, self)._before(*args, **kwargs)
@@ -64,10 +64,10 @@
         try:
             return c.db_repo_scm_instance.get_changeset(rev)
         except EmptyRepositoryError as e:
-            h.flash(_('There are no changesets yet'), category='error')
+            webutils.flash(_('There are no changesets yet'), category='error')
         except RepositoryError as e:
             log.error(traceback.format_exc())
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
         raise HTTPBadRequest()
 
     @LoginRequired(allow_default_user=True)
@@ -111,8 +111,8 @@
                         cs = self.__get_cs(revision, repo_name)
                         collection = cs.get_file_history(f_path)
                     except RepositoryError as e:
-                        h.flash(e, category='warning')
-                        raise HTTPFound(location=h.url('changelog_home', repo_name=repo_name))
+                        webutils.flash(e, category='warning')
+                        raise HTTPFound(location=webutils.url('changelog_home', repo_name=repo_name))
             else:
                 collection = c.db_repo_scm_instance.get_changesets(start=0, end=revision,
                                                         branch_name=branch_name, reverse=True)
@@ -125,11 +125,11 @@
             c.cs_comments = c.db_repo.get_comments(page_revisions)
             c.cs_statuses = c.db_repo.statuses(page_revisions)
         except EmptyRepositoryError as e:
-            h.flash(e, category='warning')
+            webutils.flash(e, category='warning')
             raise HTTPFound(location=url('summary_home', repo_name=c.repo_name))
         except (RepositoryError, ChangesetDoesNotExistError, Exception) as e:
             log.error(traceback.format_exc())
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
             raise HTTPFound(location=url('changelog_home', repo_name=c.repo_name))
 
         c.branch_name = branch_name
@@ -146,12 +146,12 @@
 
         c.revision = revision # requested revision ref
         c.first_revision = c.cs_pagination[0] # pagination is never empty here!
-        return render('changelog/changelog.html')
+        return base.render('changelog/changelog.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def changelog_details(self, cs):
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             c.cs = c.db_repo_scm_instance.get_changeset(cs)
-            return render('changelog/changelog_details.html')
+            return base.render('changelog/changelog_details.html')
         raise HTTPNotFound()
--- a/kallithea/controllers/changeset.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/changeset.py	Thu May 27 21:27:37 2021 +0200
@@ -28,7 +28,7 @@
 import binascii
 import logging
 import traceback
-from collections import OrderedDict, defaultdict
+from collections import OrderedDict
 
 from tg import request, response
 from tg import tmpl_context as c
@@ -36,136 +36,22 @@
 from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPNotFound
 
 import kallithea.lib.helpers as h
-from kallithea.lib import diffs
+from kallithea.controllers import base
+from kallithea.lib import auth, diffs, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.graphmod import graph_data
-from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import ascii_str, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
+from kallithea.model import db, meta, userlog
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.comment import ChangesetCommentsModel
-from kallithea.model.db import ChangesetComment, ChangesetStatus
-from kallithea.model.meta import Session
 from kallithea.model.pull_request import PullRequestModel
 
 
 log = logging.getLogger(__name__)
 
 
-def _update_with_GET(params, GET):
-    for k in ['diff1', 'diff2', 'diff']:
-        params[k] += GET.getall(k)
-
-
-def anchor_url(revision, path, GET):
-    fid = h.FID(revision, path)
-    return h.url.current(anchor=fid, **dict(GET))
-
-
-def get_ignore_ws(fid, GET):
-    ig_ws_global = GET.get('ignorews')
-    ig_ws = [k for k in GET.getall(fid) if k.startswith('WS')]
-    if ig_ws:
-        try:
-            return int(ig_ws[0].split(':')[-1])
-        except ValueError:
-            raise HTTPBadRequest()
-    return ig_ws_global
-
-
-def _ignorews_url(GET, fileid=None):
-    fileid = str(fileid) if fileid else None
-    params = defaultdict(list)
-    _update_with_GET(params, GET)
-    lbl = _('Show whitespace')
-    ig_ws = get_ignore_ws(fileid, GET)
-    ln_ctx = get_line_ctx(fileid, GET)
-    # global option
-    if fileid is None:
-        if ig_ws is None:
-            params['ignorews'] += [1]
-            lbl = _('Ignore whitespace')
-        ctx_key = 'context'
-        ctx_val = ln_ctx
-    # per file options
-    else:
-        if ig_ws is None:
-            params[fileid] += ['WS:1']
-            lbl = _('Ignore whitespace')
-
-        ctx_key = fileid
-        ctx_val = 'C:%s' % ln_ctx
-    # if we have passed in ln_ctx pass it along to our params
-    if ln_ctx:
-        params[ctx_key] += [ctx_val]
-
-    params['anchor'] = fileid
-    icon = h.literal('<i class="icon-strike"></i>')
-    return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
-
-
-def get_line_ctx(fid, GET):
-    ln_ctx_global = GET.get('context')
-    if fid:
-        ln_ctx = [k for k in GET.getall(fid) if k.startswith('C')]
-    else:
-        _ln_ctx = [k for k in GET if k.startswith('C')]
-        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
-        if ln_ctx:
-            ln_ctx = [ln_ctx]
-
-    if ln_ctx:
-        retval = ln_ctx[0].split(':')[-1]
-    else:
-        retval = ln_ctx_global
-
-    try:
-        return int(retval)
-    except Exception:
-        return 3
-
-
-def _context_url(GET, fileid=None):
-    """
-    Generates url for context lines
-
-    :param fileid:
-    """
-
-    fileid = str(fileid) if fileid else None
-    ig_ws = get_ignore_ws(fileid, GET)
-    ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
-
-    params = defaultdict(list)
-    _update_with_GET(params, GET)
-
-    # global option
-    if fileid is None:
-        if ln_ctx > 0:
-            params['context'] += [ln_ctx]
-
-        if ig_ws:
-            ig_ws_key = 'ignorews'
-            ig_ws_val = 1
-
-    # per file option
-    else:
-        params[fileid] += ['C:%s' % ln_ctx]
-        ig_ws_key = fileid
-        ig_ws_val = 'WS:%s' % 1
-
-    if ig_ws:
-        params[ig_ws_key] += [ig_ws_val]
-
-    lbl = _('Increase diff context to %(num)s lines') % {'num': ln_ctx}
-
-    params['anchor'] = fileid
-    icon = h.literal('<i class="icon-sort"></i>')
-    return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
-
-
 def create_cs_pr_comment(repo_name, revision=None, pull_request=None, allowed_to_change_status=True):
     """
     Add a comment to the specified changeset or pull request, using POST values
@@ -199,21 +85,21 @@
 
     if not allowed_to_change_status:
         if status or close_pr:
-            h.flash(_('No permission to change status'), 'error')
+            webutils.flash(_('No permission to change status'), 'error')
             raise HTTPForbidden()
 
     if pull_request and delete == "delete":
         if (pull_request.owner_id == request.authuser.user_id or
-            h.HasPermissionAny('hg.admin')() or
-            h.HasRepoPermissionLevel('admin')(pull_request.org_repo.repo_name) or
-            h.HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name)
+            auth.HasPermissionAny('hg.admin')() or
+            auth.HasRepoPermissionLevel('admin')(pull_request.org_repo.repo_name) or
+            auth.HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name)
         ) and not pull_request.is_closed():
             PullRequestModel().delete(pull_request)
-            Session().commit()
-            h.flash(_('Successfully deleted pull request %s') % pull_request_id,
+            meta.Session().commit()
+            webutils.flash(_('Successfully deleted pull request %s') % pull_request_id,
                     category='success')
             return {
-               'location': h.url('my_pullrequests'), # or repo pr list?
+               'location': webutils.url('my_pullrequests'), # or repo pr list?
             }
         raise HTTPForbidden()
 
@@ -227,7 +113,7 @@
         pull_request=pull_request_id,
         f_path=f_path or None,
         line_no=line_no or None,
-        status_change=ChangesetStatus.get_status_lbl(status) if status else None,
+        status_change=db.ChangesetStatus.get_status_lbl(status) if status else None,
         closing_pr=close_pr,
     )
 
@@ -245,30 +131,30 @@
         action = 'user_commented_pull_request:%s' % pull_request_id
     else:
         action = 'user_commented_revision:%s' % revision
-    action_logger(request.authuser, action, c.db_repo, request.ip_addr)
+    userlog.action_logger(request.authuser, action, c.db_repo, request.ip_addr)
 
     if pull_request and close_pr:
         PullRequestModel().close_pull_request(pull_request_id)
-        action_logger(request.authuser,
+        userlog.action_logger(request.authuser,
                       'user_closed_pull_request:%s' % pull_request_id,
                       c.db_repo, request.ip_addr)
 
-    Session().commit()
+    meta.Session().commit()
 
     data = {
-       'target_id': h.safeid(request.POST.get('f_path')),
+       'target_id': webutils.safeid(request.POST.get('f_path')),
     }
     if comment is not None:
         c.comment = comment
         data.update(comment.get_dict())
         data.update({'rendered_text':
-                     render('changeset/changeset_comment_block.html')})
+                     base.render('changeset/changeset_comment_block.html')})
 
     return data
 
 def delete_cs_pr_comment(repo_name, comment_id):
     """Delete a comment from a changeset or pull request"""
-    co = ChangesetComment.get_or_404(comment_id)
+    co = db.ChangesetComment.get_or_404(comment_id)
     if co.repo.repo_name != repo_name:
         raise HTTPNotFound()
     if co.pull_request and co.pull_request.is_closed():
@@ -276,15 +162,15 @@
         raise HTTPForbidden()
 
     owner = co.author_id == request.authuser.user_id
-    repo_admin = h.HasRepoPermissionLevel('admin')(repo_name)
-    if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
+    repo_admin = auth.HasRepoPermissionLevel('admin')(repo_name)
+    if auth.HasPermissionAny('hg.admin')() or repo_admin or owner:
         ChangesetCommentsModel().delete(comment=co)
-        Session().commit()
+        meta.Session().commit()
         return True
     else:
         raise HTTPForbidden()
 
-class ChangesetController(BaseRepoController):
+class ChangesetController(base.BaseRepoController):
 
     def _before(self, *args, **kwargs):
         super(ChangesetController, self)._before(*args, **kwargs)
@@ -292,17 +178,12 @@
 
     def _index(self, revision, method):
         c.pull_request = None
-        c.anchor_url = anchor_url
-        c.ignorews_url = _ignorews_url
-        c.context_url = _context_url
         c.fulldiff = request.GET.get('fulldiff') # for reporting number of changed files
         # get ranges of revisions if preset
         rev_range = revision.split('...')[:2]
-        enable_comments = True
         c.cs_repo = c.db_repo
         try:
             if len(rev_range) == 2:
-                enable_comments = False
                 rev_start = rev_range[0]
                 rev_end = rev_range[1]
                 rev_ranges = c.db_repo_scm_instance.get_changesets(start=rev_start,
@@ -317,7 +198,7 @@
         except (ChangesetDoesNotExistError, EmptyRepositoryError):
             log.debug(traceback.format_exc())
             msg = _('Such revision does not exist for this repository')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPNotFound()
 
         c.changes = OrderedDict()
@@ -325,7 +206,7 @@
         c.lines_added = 0  # count of lines added
         c.lines_deleted = 0  # count of lines removes
 
-        c.changeset_statuses = ChangesetStatus.STATUSES
+        c.changeset_statuses = db.ChangesetStatus.STATUSES
         comments = dict()
         c.statuses = []
         c.inline_comments = []
@@ -357,11 +238,10 @@
 
             cs2 = changeset.raw_id
             cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id
-            context_lcl = get_line_ctx('', request.GET)
-            ign_whitespace_lcl = get_ignore_ws('', request.GET)
-
+            ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
+            diff_context_size = h.get_diff_context_size(request.GET)
             raw_diff = diffs.get_diff(c.db_repo_scm_instance, cs1, cs2,
-                ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
+                ignore_whitespace=ignore_whitespace_diff, context=diff_context_size)
             diff_limit = None if c.fulldiff else self.cut_off_limit
             file_diff_data = []
             if method == 'show':
@@ -376,7 +256,7 @@
                     filename = f['filename']
                     fid = h.FID(changeset.raw_id, filename)
                     url_fid = h.FID('', filename)
-                    html_diff = diffs.as_html(enable_comments=enable_comments, parsed_lines=[f])
+                    html_diff = diffs.as_html(parsed_lines=[f])
                     file_diff_data.append((fid, url_fid, f['operation'], f['old_filename'], filename, html_diff, st))
             else:
                 # downloads/raw we only need RAW diff nothing else
@@ -405,19 +285,19 @@
         elif method == 'patch':
             response.content_type = 'text/plain'
             c.diff = safe_str(raw_diff)
-            return render('changeset/patch_changeset.html')
+            return base.render('changeset/patch_changeset.html')
         elif method == 'raw':
             response.content_type = 'text/plain'
             return raw_diff
         elif method == 'show':
             if len(c.cs_ranges) == 1:
-                return render('changeset/changeset.html')
+                return base.render('changeset/changeset.html')
             else:
                 c.cs_ranges_org = None
                 c.cs_comments = {}
                 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
                 c.jsdata = graph_data(c.db_repo_scm_instance, revs)
-                return render('changeset/changeset_range.html')
+                return base.render('changeset/changeset_range.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -441,19 +321,19 @@
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def comment(self, repo_name, revision):
         return create_cs_pr_comment(repo_name, revision=revision)
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def delete_comment(self, repo_name, comment_id):
         return delete_cs_pr_comment(repo_name, comment_id)
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def changeset_info(self, repo_name, revision):
         if request.is_xhr:
             try:
@@ -465,7 +345,7 @@
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def changeset_children(self, repo_name, revision):
         if request.is_xhr:
             changeset = c.db_repo_scm_instance.get_changeset(revision)
@@ -478,7 +358,7 @@
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def changeset_parents(self, repo_name, revision):
         if request.is_xhr:
             changeset = c.db_repo_scm_instance.get_changeset(revision)
--- a/kallithea/controllers/compare.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/compare.py	Thu May 27 21:27:37 2021 +0200
@@ -28,29 +28,25 @@
 
 
 import logging
-import re
 
-import mercurial.unionrepo
 from tg import request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound
 
-from kallithea.config.routing import url
-from kallithea.controllers.changeset import _context_url, _ignorews_url
-from kallithea.lib import diffs
-from kallithea.lib import helpers as h
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.lib import diffs, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.graphmod import graph_data
-from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int
-from kallithea.model.db import Repository
+from kallithea.lib.webutils import url
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
 
 
-class CompareController(BaseRepoController):
+class CompareController(base.BaseRepoController):
 
     def _before(self, *args, **kwargs):
         super(CompareController, self)._before(*args, **kwargs)
@@ -63,122 +59,24 @@
         if other_repo is None:
             c.cs_repo = c.a_repo
         else:
-            c.cs_repo = Repository.get_by_repo_name(other_repo)
+            c.cs_repo = db.Repository.get_by_repo_name(other_repo)
             if c.cs_repo is None:
                 msg = _('Could not find other repository %s') % other_repo
-                h.flash(msg, category='error')
+                webutils.flash(msg, category='error')
                 raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 
         # Verify that it's even possible to compare these two repositories.
         if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias:
             msg = _('Cannot compare repositories of different types')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 
-    @staticmethod
-    def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
-        """
-        Returns lists of changesets that can be merged from org_repo@org_rev
-        to other_repo@other_rev
-        ... and the other way
-        ... and the ancestors that would be used for merge
-
-        :param org_repo: repo object, that is most likely the original repo we forked from
-        :param org_rev: the revision we want our compare to be made
-        :param other_repo: repo object, most likely the fork of org_repo. It has
-            all changesets that we need to obtain
-        :param other_rev: revision we want out compare to be made on other_repo
-        """
-        ancestors = None
-        if org_rev == other_rev:
-            org_changesets = []
-            other_changesets = []
-
-        elif alias == 'hg':
-            # case two independent repos
-            if org_repo != other_repo:
-                hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui,
-                                                       safe_bytes(other_repo.path),
-                                                       safe_bytes(org_repo.path))
-                # all ancestors of other_rev will be in other_repo and
-                # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
-
-            # no remote compare do it on the same repository
-            else:
-                hgrepo = other_repo._repo
-
-            ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
-                         hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))]
-            if ancestors:
-                log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
-            else:
-                log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
-                ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
-                             hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive!
-
-            other_changesets = [
-                other_repo.get_changeset(rev)
-                for rev in hgrepo.revs(
-                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
-                    ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
-            ]
-            org_changesets = [
-                org_repo.get_changeset(ascii_str(hgrepo[rev].hex()))
-                for rev in hgrepo.revs(
-                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
-                    ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
-            ]
-
-        elif alias == 'git':
-            if org_repo != other_repo:
-                from dulwich.repo import Repo
-                from dulwich.client import SubprocessGitClient
-
-                gitrepo = Repo(org_repo.path)
-                SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
-
-                gitrepo_remote = Repo(other_repo.path)
-                SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)
-
-                revs = [
-                    ascii_str(x.commit.id)
-                    for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
-                                                       exclude=[ascii_bytes(org_rev)])
-                ]
-                other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
-                if other_changesets:
-                    ancestors = [other_changesets[0].parents[0].raw_id]
-                else:
-                    # no changesets from other repo, ancestor is the other_rev
-                    ancestors = [other_rev]
-
-                gitrepo.close()
-                gitrepo_remote.close()
-
-            else:
-                so = org_repo.run_git_command(
-                    ['log', '--reverse', '--pretty=format:%H',
-                     '-s', '%s..%s' % (org_rev, other_rev)]
-                )
-                other_changesets = [org_repo.get_changeset(cs)
-                              for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
-                so = org_repo.run_git_command(
-                    ['merge-base', org_rev, other_rev]
-                )
-                ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
-            org_changesets = []
-
-        else:
-            raise Exception('Bad alias only git and hg is allowed')
-
-        return other_changesets, org_changesets, ancestors
-
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def index(self, repo_name):
         c.compare_home = True
         c.a_ref_name = c.cs_ref_name = None
-        return render('compare/compare_diff.html')
+        return base.render('compare/compare_diff.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -202,18 +100,14 @@
         # is_ajax_preview puts hidden input field with changeset revisions
         c.is_ajax_preview = partial and request.GET.get('is_ajax_preview')
         # swap url for compare_diff page - never partial and never is_ajax_preview
-        c.swap_url = h.url('compare_url',
+        c.swap_url = webutils.url('compare_url',
             repo_name=c.cs_repo.repo_name,
             org_ref_type=other_ref_type, org_ref_name=other_ref_name,
             other_repo=c.a_repo.repo_name,
             other_ref_type=org_ref_type, other_ref_name=org_ref_name,
             merge=merge or '')
-
-        # set callbacks for generating markup for icons
-        c.ignorews_url = _ignorews_url
-        c.context_url = _context_url
-        ignore_whitespace = request.GET.get('ignorews') == '1'
-        line_context = safe_int(request.GET.get('context'), 3)
+        ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
+        diff_context_size = h.get_diff_context_size(request.GET)
 
         c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name,
             returnempty=True)
@@ -225,9 +119,8 @@
         c.cs_ref_name = other_ref_name
         c.cs_ref_type = other_ref_type
 
-        c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets(
-            c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev,
-            c.cs_repo.scm_instance, c.cs_rev)
+        c.cs_ranges, c.cs_ranges_org, c.ancestors = c.a_repo.scm_instance.get_diff_changesets(
+            c.a_rev, c.cs_repo.scm_instance, c.cs_rev)
         raw_ids = [x.raw_id for x in c.cs_ranges]
         c.cs_comments = c.cs_repo.get_comments(raw_ids)
         c.cs_statuses = c.cs_repo.statuses(raw_ids)
@@ -236,7 +129,7 @@
         c.jsdata = graph_data(c.cs_repo.scm_instance, revs)
 
         if partial:
-            return render('compare/compare_cs.html')
+            return base.render('compare/compare_cs.html')
 
         org_repo = c.a_repo
         other_repo = c.cs_repo
@@ -252,7 +145,7 @@
             else:
                 msg = _('Multiple merge ancestors found for merge compare')
             if rev1 is None:
-                h.flash(msg, category='error')
+                webutils.flash(msg, category='error')
                 log.error(msg)
                 raise HTTPNotFound
 
@@ -266,7 +159,7 @@
             if org_repo != other_repo:
                 # TODO: we could do this by using hg unionrepo
                 log.error('cannot compare across repos %s and %s', org_repo, other_repo)
-                h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
+                webutils.flash(_('Cannot compare repositories without using common ancestor'), category='error')
                 raise HTTPBadRequest
             rev1 = c.a_rev
 
@@ -275,8 +168,8 @@
         log.debug('running diff between %s and %s in %s',
                   rev1, c.cs_rev, org_repo.scm_instance.path)
         raw_diff = diffs.get_diff(org_repo.scm_instance, rev1=rev1, rev2=c.cs_rev,
-                                      ignore_whitespace=ignore_whitespace,
-                                      context=line_context)
+                                      ignore_whitespace=ignore_whitespace_diff,
+                                      context=diff_context_size)
 
         diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
         c.limited_diff = diff_processor.limited_diff
@@ -289,7 +182,7 @@
             c.lines_deleted += st['deleted']
             filename = f['filename']
             fid = h.FID('', filename)
-            html_diff = diffs.as_html(enable_comments=False, parsed_lines=[f])
+            html_diff = diffs.as_html(parsed_lines=[f])
             c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st))
 
-        return render('compare/compare_diff.html')
+        return base.render('compare/compare_diff.html')
--- a/kallithea/controllers/error.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/error.py	Thu May 27 21:27:37 2021 +0200
@@ -32,13 +32,13 @@
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 
-from kallithea.lib.base import BaseController
+from kallithea.controllers import base
 
 
 log = logging.getLogger(__name__)
 
 
-class ErrorController(BaseController):
+class ErrorController(base.BaseController):
     """Generates error documents as and when they are required.
 
     The errorpage middleware renders /error/document when error
--- a/kallithea/controllers/feed.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/feed.py	Thu May 27 21:27:37 2021 +0200
@@ -33,19 +33,19 @@
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 
-from kallithea import CONFIG
-from kallithea.lib import feeds
-from kallithea.lib import helpers as h
+import kallithea
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.lib import feeds, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController
 from kallithea.lib.diffs import DiffProcessor
-from kallithea.lib.utils2 import safe_int, safe_str, str2bool
+from kallithea.lib.utils2 import asbool, safe_int, safe_str
 
 
 log = logging.getLogger(__name__)
 
 
-class FeedController(BaseRepoController):
+class FeedController(base.BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -53,11 +53,11 @@
         super(FeedController, self)._before(*args, **kwargs)
 
     def _get_title(self, cs):
-        return h.shorter(cs.message, 160)
+        return webutils.shorter(cs.message, 160)
 
     def __get_desc(self, cs):
         desc_msg = [(_('%s committed on %s')
-                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
+                     % (h.person(cs.author), webutils.fmt_date(cs.date))) + '<br/>']
         # branches, tags, bookmarks
         for branch in cs.branches:
             desc_msg.append('branch: %s<br/>' % branch)
@@ -67,11 +67,11 @@
             desc_msg.append('tag: %s<br/>' % tag)
 
         changes = []
-        diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
+        diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024))
         raw_diff = cs.diff()
         diff_processor = DiffProcessor(raw_diff,
                                        diff_limit=diff_limit,
-                                       inline_diff=False)
+                                       html=False)
 
         for st in diff_processor.parsed:
             st.update({'added': st['stats']['added'],
@@ -84,15 +84,15 @@
                                  _('Changeset was too big and was cut off...')]
 
         # rev link
-        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
+        _url = webutils.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                    revision=cs.raw_id)
         desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 
         desc_msg.append('<pre>')
-        desc_msg.append(h.urlify_text(cs.message))
+        desc_msg.append(webutils.urlify_text(cs.message))
         desc_msg.append('\n')
         desc_msg.extend(changes)
-        if str2bool(CONFIG.get('rss_include_diff', False)):
+        if asbool(kallithea.CONFIG.get('rss_include_diff', False)):
             desc_msg.append('\n\n')
             desc_msg.append(safe_str(raw_diff))
         desc_msg.append('</pre>')
@@ -105,16 +105,16 @@
         def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
             header = dict(
                 title=_('%s %s feed') % (c.site_name, repo_name),
-                link=h.canonical_url('summary_home', repo_name=repo_name),
+                link=webutils.canonical_url('summary_home', repo_name=repo_name),
                 description=_('Changes on %s repository') % repo_name,
             )
 
-            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+            rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20))
             entries=[]
             for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                 entries.append(dict(
                     title=self._get_title(cs),
-                    link=h.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
+                    link=webutils.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
                     author_email=cs.author_email,
                     author_name=cs.author_name,
                     description=''.join(self.__get_desc(cs)),
--- a/kallithea/controllers/files.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/files.py	Thu May 27 21:27:37 2021 +0200
@@ -38,21 +38,21 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound, HTTPNotFound
 
-from kallithea.config.routing import url
-from kallithea.controllers.changeset import _context_url, _ignorews_url, anchor_url, get_ignore_ws, get_line_ctx
-from kallithea.lib import diffs
-from kallithea.lib import helpers as h
+import kallithea
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.lib import diffs, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.exceptions import NonRelativePathError
-from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, str2bool
+from kallithea.lib.utils2 import asbool, convert_line_endings, detect_mode, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
                                           NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 from kallithea.lib.vcs.nodes import FileNode
-from kallithea.model import db
+from kallithea.lib.vcs.utils import author_email
+from kallithea.lib.webutils import url
+from kallithea.model import userlog
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
 
@@ -60,7 +60,7 @@
 log = logging.getLogger(__name__)
 
 
-class FilesController(BaseRepoController):
+class FilesController(base.BaseRepoController):
 
     def _before(self, *args, **kwargs):
         super(FilesController, self)._before(*args, **kwargs)
@@ -82,15 +82,15 @@
             url_ = url('files_add_home',
                        repo_name=c.repo_name,
                        revision=0, f_path='', anchor='edit')
-            add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
-            h.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
+            add_new = webutils.link_to(_('Click here to add new file'), url_, class_="alert-link")
+            webutils.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
             raise HTTPNotFound()
         except (ChangesetDoesNotExistError, LookupError):
             msg = _('Such revision does not exist for this repository')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
             raise HTTPNotFound()
 
     def __get_filenode(self, cs, path):
@@ -107,10 +107,10 @@
                 raise RepositoryError('given path is a directory')
         except ChangesetDoesNotExistError:
             msg = _('Such revision does not exist for this repository')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
             raise HTTPNotFound()
 
         return file_node
@@ -171,30 +171,30 @@
 
                 c.authors = []
                 for a in set([x.author for x in _hist]):
-                    c.authors.append((h.email(a), h.person(a)))
+                    c.authors.append((author_email(a), h.person(a)))
             else:
                 c.authors = c.file_history = []
         except RepositoryError as e:
-            h.flash(e, category='error')
+            webutils.flash(e, category='error')
             raise HTTPNotFound()
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('files/files_ypjax.html')
+            return base.render('files/files_ypjax.html')
 
         # TODO: tags and bookmarks?
         c.revision_options = [(c.changeset.raw_id,
-                              _('%s at %s') % (b, h.short_id(c.changeset.raw_id))) for b in c.changeset.branches] + \
+                              _('%s at %s') % (b, c.changeset.short_id)) for b in c.changeset.branches] + \
             [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
         if c.db_repo_scm_instance.closed_branches:
             prefix = _('(closed)') + ' '
             c.revision_options += [('-', '-')] + \
                 [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
 
-        return render('files/files.html')
+        return base.render('files/files.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def history(self, repo_name, revision, f_path):
         changeset = self.__get_cs(revision)
         _file = changeset.get_node(f_path)
@@ -223,8 +223,8 @@
             file_history, _hist = self._get_node_history(changeset, f_path)
             c.authors = []
             for a in set([x.author for x in _hist]):
-                c.authors.append((h.email(a), h.person(a)))
-            return render('files/files_history_box.html')
+                c.authors.append((author_email(a), h.person(a)))
+            return base.render('files/files_history_box.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -233,7 +233,7 @@
         file_node = self.__get_filenode(cs, f_path)
 
         response.content_disposition = \
-            'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
+            'attachment; filename=%s' % f_path.split(kallithea.URL_SEP)[-1]
 
         response.content_type = file_node.mimetype
         return file_node.content
@@ -292,9 +292,9 @@
         _branches = repo.scm_instance.branches
         # check if revision is a branch name or branch hash
         if revision not in _branches and revision not in _branches.values():
-            h.flash(_('You can only delete files with revision '
+            webutils.flash(_('You can only delete files with revision '
                       'being a valid branch'), category='warning')
-            raise HTTPFound(location=h.url('files_home',
+            raise HTTPFound(location=webutils.url('files_home',
                                   repo_name=repo_name, revision='tip',
                                   f_path=f_path))
 
@@ -327,15 +327,15 @@
                     author=author,
                 )
 
-                h.flash(_('Successfully deleted file %s') % f_path,
+                webutils.flash(_('Successfully deleted file %s') % f_path,
                         category='success')
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during commit'), category='error')
+                webutils.flash(_('Error occurred during commit'), category='error')
             raise HTTPFound(location=url('changeset_home',
                                 repo_name=c.repo_name, revision='tip'))
 
-        return render('files/files_delete.html')
+        return base.render('files/files_delete.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('write')
@@ -346,9 +346,9 @@
         _branches = repo.scm_instance.branches
         # check if revision is a branch name or branch hash
         if revision not in _branches and revision not in _branches.values():
-            h.flash(_('You can only edit files with revision '
+            webutils.flash(_('You can only edit files with revision '
                       'being a valid branch'), category='warning')
-            raise HTTPFound(location=h.url('files_home',
+            raise HTTPFound(location=webutils.url('files_home',
                                   repo_name=repo_name, revision='tip',
                                   f_path=f_path))
 
@@ -375,7 +375,7 @@
             author = request.authuser.full_contact
 
             if content == old_content:
-                h.flash(_('No changes'), category='warning')
+                webutils.flash(_('No changes'), category='warning')
                 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))
             try:
@@ -385,15 +385,15 @@
                                              ip_addr=request.ip_addr,
                                              author=author, message=message,
                                              content=content, f_path=f_path)
-                h.flash(_('Successfully committed to %s') % f_path,
+                webutils.flash(_('Successfully committed to %s') % f_path,
                         category='success')
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during commit'), category='error')
+                webutils.flash(_('Error occurred during commit'), category='error')
             raise HTTPFound(location=url('changeset_home',
                                 repo_name=c.repo_name, revision='tip'))
 
-        return render('files/files_edit.html')
+        return base.render('files/files_edit.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('write')
@@ -425,11 +425,11 @@
                     content = content.file
 
             if not content:
-                h.flash(_('No content'), category='warning')
+                webutils.flash(_('No content'), category='warning')
                 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))
             if not filename:
-                h.flash(_('No filename'), category='warning')
+                webutils.flash(_('No filename'), category='warning')
                 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))
             # strip all crap out of file, just leave the basename
@@ -453,22 +453,22 @@
                     author=author,
                 )
 
-                h.flash(_('Successfully committed to %s') % node_path,
+                webutils.flash(_('Successfully committed to %s') % node_path,
                         category='success')
             except NonRelativePathError as e:
-                h.flash(_('Location must be relative path and must not '
+                webutils.flash(_('Location must be relative path and must not '
                           'contain .. in path'), category='warning')
                 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))
             except (NodeError, NodeAlreadyExistsError) as e:
-                h.flash(_(e), category='error')
+                webutils.flash(_(e), category='error')
             except Exception:
                 log.error(traceback.format_exc())
-                h.flash(_('Error occurred during commit'), category='error')
+                webutils.flash(_('Error occurred during commit'), category='error')
             raise HTTPFound(location=url('changeset_home',
                                 repo_name=c.repo_name, revision='tip'))
 
-        return render('files/files_add.html')
+        return base.render('files/files_add.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -505,13 +505,12 @@
         except (ImproperArchiveTypeError, KeyError):
             return _('Unknown archive type')
 
-        from kallithea import CONFIG
         rev_name = cs.raw_id[:12]
         archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
 
         archive_path = None
         cached_archive_path = None
-        archive_cache_dir = CONFIG.get('archive_cache_dir')
+        archive_cache_dir = kallithea.CONFIG.get('archive_cache_dir')
         if archive_cache_dir and not subrepos: # TODO: subrepo caching?
             if not os.path.isdir(archive_cache_dir):
                 os.makedirs(archive_cache_dir)
@@ -547,7 +546,7 @@
                 log.debug('Destroying temp archive %s', archive_path)
                 os.remove(archive_path)
 
-        action_logger(user=request.authuser,
+        userlog.action_logger(user=request.authuser,
                       action='user_downloaded_archive:%s' % (archive_name),
                       repo=repo_name, ipaddr=request.ip_addr, commit=True)
 
@@ -558,8 +557,8 @@
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def diff(self, repo_name, f_path):
-        ignore_whitespace = request.GET.get('ignorews') == '1'
-        line_context = safe_int(request.GET.get('context'), 3)
+        ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
+        diff_context_size = h.get_diff_context_size(request.GET)
         diff2 = request.GET.get('diff2', '')
         diff1 = request.GET.get('diff1', '') or diff2
         c.action = request.GET.get('diff')
@@ -567,9 +566,6 @@
         c.f_path = f_path
         c.big_diff = False
         fulldiff = request.GET.get('fulldiff')
-        c.anchor_url = anchor_url
-        c.ignorews_url = _ignorews_url
-        c.context_url = _context_url
         c.changes = OrderedDict()
         c.changes[diff2] = []
 
@@ -577,7 +573,7 @@
         # to reduce JS and callbacks
 
         if request.GET.get('show_rev'):
-            if str2bool(request.GET.get('annotate', 'False')):
+            if asbool(request.GET.get('annotate', 'False')):
                 _url = url('files_annotate_home', repo_name=c.repo_name,
                            revision=diff1, f_path=c.f_path)
             else:
@@ -624,8 +620,8 @@
 
         if c.action == 'download':
             raw_diff = diffs.get_gitdiff(node1, node2,
-                                      ignore_whitespace=ignore_whitespace,
-                                      context=line_context)
+                                      ignore_whitespace=ignore_whitespace_diff,
+                                      context=diff_context_size)
             diff_name = '%s_vs_%s.diff' % (diff1, diff2)
             response.content_type = 'text/plain'
             response.content_disposition = (
@@ -635,26 +631,21 @@
 
         elif c.action == 'raw':
             raw_diff = diffs.get_gitdiff(node1, node2,
-                                      ignore_whitespace=ignore_whitespace,
-                                      context=line_context)
+                                      ignore_whitespace=ignore_whitespace_diff,
+                                      context=diff_context_size)
             response.content_type = 'text/plain'
             return raw_diff
 
         else:
             fid = h.FID(diff2, node2.path)
-            line_context_lcl = get_line_ctx(fid, request.GET)
-            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
-
             diff_limit = None if fulldiff else self.cut_off_limit
-            c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.wrapped_diff(filenode_old=node1,
+            c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.html_diff(filenode_old=node1,
                                          filenode_new=node2,
                                          diff_limit=diff_limit,
-                                         ignore_whitespace=ign_whitespace_lcl,
-                                         line_context=line_context_lcl,
-                                         enable_comments=False)
+                                         ignore_whitespace=ignore_whitespace_diff,
+                                         line_context=diff_context_size)
             c.file_diff_data = [(fid, fid, op, a_path, node2.path, diff, st)]
-
-            return render('files/file_diff.html')
+            return base.render('files/file_diff.html')
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
@@ -695,14 +686,14 @@
                 node2 = FileNode(f_path, '', changeset=c.changeset_2)
         except ChangesetDoesNotExistError as e:
             msg = _('Such revision does not exist for this repository')
-            h.flash(msg, category='error')
+            webutils.flash(msg, category='error')
             raise HTTPNotFound()
         c.node1 = node1
         c.node2 = node2
         c.cs1 = c.changeset_1
         c.cs2 = c.changeset_2
 
-        return render('files/diff_2way.html')
+        return base.render('files/diff_2way.html')
 
     def _get_node_history(self, cs, f_path, changesets=None):
         """
@@ -745,7 +736,7 @@
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def nodelist(self, repo_name, revision, f_path):
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             cs = self.__get_cs(revision)
--- a/kallithea/controllers/followers.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/followers.py	Thu May 27 21:27:37 2021 +0200
@@ -30,28 +30,28 @@
 from tg import request
 from tg import tmpl_context as c
 
+from kallithea.controllers import base
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int
-from kallithea.model.db import UserFollowing
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
 
 
-class FollowersController(BaseRepoController):
+class FollowersController(base.BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def followers(self, repo_name):
         p = safe_int(request.GET.get('page'), 1)
         repo_id = c.db_repo.repo_id
-        d = UserFollowing.get_repo_followers(repo_id) \
-            .order_by(UserFollowing.follows_from)
+        d = db.UserFollowing.get_repo_followers(repo_id) \
+            .order_by(db.UserFollowing.follows_from)
         c.followers_pager = Page(d, page=p, items_per_page=20)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('/followers/followers_data.html')
+            return base.render('/followers/followers_data.html')
 
-        return render('/followers/followers.html')
+        return base.render('/followers/followers.html')
--- a/kallithea/controllers/forks.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/forks.py	Thu May 27 21:27:37 2021 +0200
@@ -33,16 +33,15 @@
 from tg import request
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webob.exc import HTTPFound
+from webob.exc import HTTPFound, HTTPNotFound
 
 import kallithea
-import kallithea.lib.helpers as h
-from kallithea.config.routing import url
-from kallithea.lib.auth import HasPermissionAny, HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, render
+from kallithea.controllers import base
+from kallithea.lib import webutils
+from kallithea.lib.auth import HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int
-from kallithea.model.db import Repository, Ui, UserFollowing
+from kallithea.model import db
 from kallithea.model.forms import RepoForkForm
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import AvailableRepoGroupChoices, ScmModel
@@ -51,18 +50,14 @@
 log = logging.getLogger(__name__)
 
 
-class ForksController(BaseRepoController):
+class ForksController(base.BaseRepoController):
 
     def __load_defaults(self):
-        if HasPermissionAny('hg.create.write_on_repogroup.true')():
-            repo_group_perm_level = 'write'
-        else:
-            repo_group_perm_level = 'admin'
-        c.repo_groups = AvailableRepoGroupChoices(['hg.create.repository'], repo_group_perm_level)
+        c.repo_groups = AvailableRepoGroupChoices('write')
 
         c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs()
 
-        c.can_update = Ui.get_by_key('hooks', Ui.HOOK_UPDATE).ui_active
+        c.can_update = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE).ui_active
 
     def __load_data(self):
         """
@@ -74,13 +69,12 @@
         repo = c.db_repo.scm_instance
 
         if c.repo_info is None:
-            h.not_mapped_error(c.repo_name)
-            raise HTTPFound(location=url('repos'))
+            raise HTTPNotFound()
 
         c.default_user_id = kallithea.DEFAULT_USER_ID
-        c.in_public_journal = UserFollowing.query() \
-            .filter(UserFollowing.user_id == c.default_user_id) \
-            .filter(UserFollowing.follows_repository == c.repo_info).scalar()
+        c.in_public_journal = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == c.default_user_id) \
+            .filter(db.UserFollowing.follows_repository == c.repo_info).scalar()
 
         if c.repo_info.stats:
             last_rev = c.repo_info.stats.stat_on_revision + 1
@@ -112,30 +106,29 @@
         p = safe_int(request.GET.get('page'), 1)
         repo_id = c.db_repo.repo_id
         d = []
-        for r in Repository.get_repo_forks(repo_id):
+        for r in db.Repository.get_repo_forks(repo_id):
             if not HasRepoPermissionLevel('read')(r.repo_name, 'get forks check'):
                 continue
             d.append(r)
         c.forks_pager = Page(d, page=p, items_per_page=20)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('/forks/forks_data.html')
+            return base.render('/forks/forks_data.html')
 
-        return render('/forks/forks.html')
+        return base.render('/forks/forks.html')
 
     @LoginRequired()
     @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
     @HasRepoPermissionLevelDecorator('read')
     def fork(self, repo_name):
-        c.repo_info = Repository.get_by_repo_name(repo_name)
+        c.repo_info = db.Repository.get_by_repo_name(repo_name)
         if not c.repo_info:
-            h.not_mapped_error(repo_name)
-            raise HTTPFound(location=url('home'))
+            raise HTTPNotFound()
 
         defaults = self.__load_data()
 
         return htmlfill.render(
-            render('forks/fork.html'),
+            base.render('forks/fork.html'),
             defaults=defaults,
             encoding="UTF-8",
             force_defaults=False)
@@ -145,26 +138,24 @@
     @HasRepoPermissionLevelDecorator('read')
     def fork_create(self, repo_name):
         self.__load_defaults()
-        c.repo_info = Repository.get_by_repo_name(repo_name)
+        c.repo_info = db.Repository.get_by_repo_name(repo_name)
         _form = RepoForkForm(old_data={'repo_type': c.repo_info.repo_type},
                              repo_groups=c.repo_groups,
                              landing_revs=c.landing_revs_choices)()
         form_result = {}
-        task_id = None
         try:
             form_result = _form.to_python(dict(request.POST))
 
             # an approximation that is better than nothing
-            if not Ui.get_by_key('hooks', Ui.HOOK_UPDATE).ui_active:
+            if not db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE).ui_active:
                 form_result['update_after_clone'] = False
 
             # create fork is done sometimes async on celery, db transaction
             # management is handled there.
-            task = RepoModel().create_fork(form_result, request.authuser.user_id)
-            task_id = task.task_id
+            RepoModel().create_fork(form_result, request.authuser.user_id)
         except formencode.Invalid as errors:
             return htmlfill.render(
-                render('forks/fork.html'),
+                base.render('forks/fork.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -172,9 +163,9 @@
                 force_defaults=False)
         except Exception:
             log.error(traceback.format_exc())
-            h.flash(_('An error occurred during repository forking %s') %
+            webutils.flash(_('An error occurred during repository forking %s') %
                     repo_name, category='error')
 
-        raise HTTPFound(location=h.url('repo_creating_home',
+        raise HTTPFound(location=webutils.url('repo_creating_home',
                               repo_name=form_result['repo_name_full'],
-                              task_id=task_id))
+                              ))
--- a/kallithea/controllers/home.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/home.py	Thu May 27 21:27:37 2021 +0200
@@ -34,11 +34,11 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest
 
-from kallithea.lib import helpers as h
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseController, jsonify, render
 from kallithea.lib.utils2 import safe_str
-from kallithea.model.db import RepoGroup, Repository, User, UserGroup
+from kallithea.model import db
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import UserGroupList
 
@@ -46,31 +46,31 @@
 log = logging.getLogger(__name__)
 
 
-class HomeController(BaseController):
+class HomeController(base.BaseController):
 
     def about(self):
-        return render('/about.html')
+        return base.render('/about.html')
 
     @LoginRequired(allow_default_user=True)
     def index(self):
         c.group = None
 
         repo_groups_list = self.scm_model.get_repo_groups()
-        repos_list = Repository.query(sorted=True).filter_by(group=None).all()
+        repos_list = db.Repository.query(sorted=True).filter_by(group=None).all()
 
         c.data = RepoModel().get_repos_as_dict(repos_list,
                                                repo_groups_list=repo_groups_list,
                                                short_name=True)
 
-        return render('/index.html')
+        return base.render('/index.html')
 
     @LoginRequired(allow_default_user=True)
-    @jsonify
+    @base.jsonify
     def repo_switcher_data(self):
         if request.is_xhr:
-            all_repos = Repository.query(sorted=True).all()
+            all_repos = db.Repository.query(sorted=True).all()
             repo_iter = self.scm_model.get_repos(all_repos)
-            all_groups = RepoGroup.query(sorted=True).all()
+            all_groups = db.RepoGroup.query(sorted=True).all()
             repo_groups_iter = self.scm_model.get_repo_groups(all_groups)
 
             res = [{
@@ -109,9 +109,9 @@
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def repo_refs_data(self, repo_name):
-        repo = Repository.get_by_repo_name(repo_name).scm_instance
+        repo = db.Repository.get_by_repo_name(repo_name).scm_instance
         res = []
         _branches = repo.branches.items()
         if _branches:
@@ -144,7 +144,7 @@
         return data
 
     @LoginRequired()
-    @jsonify
+    @base.jsonify
     def users_and_groups_data(self):
         """
         Returns 'results' with a list of users and user groups.
@@ -163,19 +163,20 @@
         if 'users' in types:
             user_list = []
             if key:
-                u = User.get_by_username(key)
+                u = db.User.get_by_username(key)
                 if u:
                     user_list = [u]
             elif query:
-                user_list = User.query() \
-                    .filter(User.is_default_user == False) \
-                    .filter(User.active == True) \
+                user_list = db.User.query() \
+                    .filter(db.User.is_default_user == False) \
+                    .filter(db.User.active == True) \
                     .filter(or_(
-                        User.username.ilike("%%" + query + "%%"),
-                        User.name.ilike("%%" + query + "%%"),
-                        User.lastname.ilike("%%" + query + "%%"),
+                        db.User.username.ilike("%%" + query + "%%"),
+                        db.User.name.concat(' ').concat(db.User.lastname).ilike("%%" + query + "%%"),
+                        db.User.lastname.concat(' ').concat(db.User.name).ilike("%%" + query + "%%"),
+                        db.User.email.ilike("%%" + query + "%%"),
                     )) \
-                    .order_by(User.username) \
+                    .order_by(db.User.username) \
                     .limit(500) \
                     .all()
             for u in user_list:
@@ -191,14 +192,14 @@
         if 'groups' in types:
             grp_list = []
             if key:
-                grp = UserGroup.get_by_group_name(key)
+                grp = db.UserGroup.get_by_group_name(key)
                 if grp:
                     grp_list = [grp]
             elif query:
-                grp_list = UserGroup.query() \
-                    .filter(UserGroup.users_group_name.ilike("%%" + query + "%%")) \
-                    .filter(UserGroup.users_group_active == True) \
-                    .order_by(UserGroup.users_group_name) \
+                grp_list = db.UserGroup.query() \
+                    .filter(db.UserGroup.users_group_name.ilike("%%" + query + "%%")) \
+                    .filter(db.UserGroup.users_group_active == True) \
+                    .order_by(db.UserGroup.users_group_name) \
                     .limit(500) \
                     .all()
             for g in UserGroupList(grp_list, perm_level='read'):
--- a/kallithea/controllers/journal.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/journal.py	Thu May 27 21:27:37 2021 +0200
@@ -37,14 +37,13 @@
 from webob.exc import HTTPBadRequest
 
 import kallithea.lib.helpers as h
+from kallithea.controllers import base
 from kallithea.controllers.admin.admin import _journal_filter
-from kallithea.lib import feeds
+from kallithea.lib import feeds, webutils
 from kallithea.lib.auth import LoginRequired
-from kallithea.lib.base import BaseController, render
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import AttributeDict, safe_int
-from kallithea.model.db import Repository, User, UserFollowing, UserLog
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 
 
@@ -56,7 +55,7 @@
 feed_nr = 20
 
 
-class JournalController(BaseController):
+class JournalController(base.BaseController):
 
     def _before(self, *args, **kwargs):
         super(JournalController, self)._before(*args, **kwargs)
@@ -84,20 +83,20 @@
         filtering_criterion = None
 
         if repo_ids and user_ids:
-            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
-                        UserLog.user_id.in_(user_ids))
+            filtering_criterion = or_(db.UserLog.repository_id.in_(repo_ids),
+                        db.UserLog.user_id.in_(user_ids))
         if repo_ids and not user_ids:
-            filtering_criterion = UserLog.repository_id.in_(repo_ids)
+            filtering_criterion = db.UserLog.repository_id.in_(repo_ids)
         if not repo_ids and user_ids:
-            filtering_criterion = UserLog.user_id.in_(user_ids)
+            filtering_criterion = db.UserLog.user_id.in_(user_ids)
         if filtering_criterion is not None:
-            journal = UserLog.query() \
-                .options(joinedload(UserLog.user)) \
-                .options(joinedload(UserLog.repository))
+            journal = db.UserLog.query() \
+                .options(joinedload(db.UserLog.user)) \
+                .options(joinedload(db.UserLog.repository))
             # filter
             journal = _journal_filter(journal, c.search_term)
             journal = journal.filter(filtering_criterion) \
-                        .order_by(UserLog.action_date.desc())
+                        .order_by(db.UserLog.action_date.desc())
         else:
             journal = []
 
@@ -126,13 +125,13 @@
                                     entry.repository.repo_name)
             _url = None
             if entry.repository is not None:
-                _url = h.canonical_url('changelog_home',
+                _url = webutils.canonical_url('changelog_home',
                            repo_name=entry.repository.repo_name)
 
             entries.append(dict(
                 title=title,
                 pubdate=entry.action_date,
-                link=_url or h.canonical_url(''),
+                link=_url or webutils.canonical_url(''),
                 author_email=user.email,
                 author_name=user.full_name_or_username,
                 description=action_extra(),
@@ -142,22 +141,22 @@
 
     def _atom_feed(self, repos, public=True):
         if public:
-            link = h.canonical_url('public_journal_atom')
+            link = webutils.canonical_url('public_journal_atom')
             desc = '%s %s %s' % (c.site_name, _('Public Journal'),
                                   'atom feed')
         else:
-            link = h.canonical_url('journal_atom')
+            link = webutils.canonical_url('journal_atom')
             desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
 
         return self._feed(repos, feeds.AtomFeed, link, desc)
 
     def _rss_feed(self, repos, public=True):
         if public:
-            link = h.canonical_url('public_journal_atom')
+            link = webutils.canonical_url('public_journal_atom')
             desc = '%s %s %s' % (c.site_name, _('Public Journal'),
                                   'rss feed')
         else:
-            link = h.canonical_url('journal_atom')
+            link = webutils.canonical_url('journal_atom')
             desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
 
         return self._feed(repos, feeds.RssFeed, link, desc)
@@ -166,10 +165,10 @@
     def index(self):
         # Return a rendered template
         p = safe_int(request.GET.get('page'), 1)
-        c.user = User.get(request.authuser.user_id)
-        c.following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        c.user = db.User.get(request.authuser.user_id)
+        c.following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
 
         journal = self._get_journal_data(c.following)
@@ -179,32 +178,32 @@
         c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('journal/journal_data.html')
+            return base.render('journal/journal_data.html')
 
-        repos_list = Repository.query(sorted=True) \
+        repos_list = db.Repository.query(sorted=True) \
             .filter_by(owner_id=request.authuser.user_id).all()
 
         repos_data = RepoModel().get_repos_as_dict(repos_list, admin=True)
         # data used to render the grid
         c.data = repos_data
 
-        return render('journal/journal.html')
+        return base.render('journal/journal.html')
 
     @LoginRequired()
     def journal_atom(self):
         """Produce a simple atom-1.0 feed"""
-        following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
         return self._atom_feed(following, public=False)
 
     @LoginRequired()
     def journal_rss(self):
         """Produce a simple rss2 feed"""
-        following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
         return self._rss_feed(following, public=False)
 
@@ -215,7 +214,7 @@
             try:
                 self.scm_model.toggle_following_user(user_id,
                                             request.authuser.user_id)
-                Session().commit()
+                meta.Session().commit()
                 return 'ok'
             except Exception:
                 log.error(traceback.format_exc())
@@ -226,7 +225,7 @@
             try:
                 self.scm_model.toggle_following_repo(repo_id,
                                             request.authuser.user_id)
-                Session().commit()
+                meta.Session().commit()
                 return 'ok'
             except Exception:
                 log.error(traceback.format_exc())
@@ -239,9 +238,9 @@
         # Return a rendered template
         p = safe_int(request.GET.get('page'), 1)
 
-        c.following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        c.following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
 
         journal = self._get_journal_data(c.following)
@@ -251,16 +250,16 @@
         c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)
 
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
-            return render('journal/journal_data.html')
+            return base.render('journal/journal_data.html')
 
-        return render('journal/public_journal.html')
+        return base.render('journal/public_journal.html')
 
     @LoginRequired(allow_default_user=True)
     def public_journal_atom(self):
         """Produce a simple atom-1.0 feed"""
-        c.following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        c.following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
 
         return self._atom_feed(c.following)
@@ -268,9 +267,9 @@
     @LoginRequired(allow_default_user=True)
     def public_journal_rss(self):
         """Produce a simple rss2 feed"""
-        c.following = UserFollowing.query() \
-            .filter(UserFollowing.user_id == request.authuser.user_id) \
-            .options(joinedload(UserFollowing.follows_repository)) \
+        c.following = db.UserFollowing.query() \
+            .filter(db.UserFollowing.user_id == request.authuser.user_id) \
+            .options(joinedload(db.UserFollowing.follows_repository)) \
             .all()
 
         return self._rss_feed(c.following)
--- a/kallithea/controllers/login.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/login.py	Thu May 27 21:27:37 2021 +0200
@@ -36,21 +36,21 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest, HTTPFound
 
-import kallithea.lib.helpers as h
-from kallithea.config.routing import url
+from kallithea.controllers import base
+from kallithea.lib import webutils
 from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator
-from kallithea.lib.base import BaseController, log_in_user, render
 from kallithea.lib.exceptions import UserCreationError
-from kallithea.model.db import Setting, User
+from kallithea.lib.recaptcha import submit
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import LoginForm, PasswordResetConfirmationForm, PasswordResetRequestForm, RegisterForm
-from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
 
 
 log = logging.getLogger(__name__)
 
 
-class LoginController(BaseController):
+class LoginController(base.BaseController):
 
     def _validate_came_from(self, came_from,
             _re=re.compile(r"/(?!/)[-!#$%&'()*+,./:;=?@_~0-9A-Za-z]*$")):
@@ -82,14 +82,14 @@
                 # login_form will check username/password using ValidAuth and report failure to the user
                 c.form_result = login_form.to_python(dict(request.POST))
                 username = c.form_result['username']
-                user = User.get_by_username_or_email(username)
+                user = db.User.get_by_username_or_email(username)
                 assert user is not None  # the same user get just passed in the form validation
             except formencode.Invalid as errors:
                 defaults = errors.value
                 # remove password from filling in form again
                 defaults.pop('password', None)
                 return htmlfill.render(
-                    render('/login.html'),
+                    base.render('/login.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -100,28 +100,28 @@
                 # the fly can throw this exception signaling that there's issue
                 # with user creation, explanation should be provided in
                 # Exception itself
-                h.flash(e, 'error')
+                webutils.flash(e, 'error')
             else:
                 # login_form already validated the password - now set the session cookie accordingly
-                auth_user = log_in_user(user, c.form_result['remember'], is_external_auth=False, ip_addr=request.ip_addr)
+                auth_user = base.log_in_user(user, c.form_result['remember'], is_external_auth=False, ip_addr=request.ip_addr)
                 if auth_user:
                     raise HTTPFound(location=c.came_from)
-                h.flash(_('Authentication failed.'), 'error')
+                webutils.flash(_('Authentication failed.'), 'error')
         else:
             # redirect if already logged in
             if not request.authuser.is_anonymous:
                 raise HTTPFound(location=c.came_from)
             # continue to show login to default user
 
-        return render('/login.html')
+        return base.render('/login.html')
 
     @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate',
                                'hg.register.manual_activate')
     def register(self):
-        def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global']
+        def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions
         c.auto_active = 'hg.register.auto_activate' in def_user_perms
 
-        settings = Setting.get_app_settings()
+        settings = db.Setting.get_app_settings()
         captcha_private_key = settings.get('captcha_private_key')
         c.captcha_active = bool(captcha_private_key)
         c.captcha_public_key = settings.get('captcha_public_key')
@@ -133,7 +133,6 @@
                 form_result['active'] = c.auto_active
 
                 if c.captcha_active:
-                    from kallithea.lib.recaptcha import submit
                     response = submit(request.POST.get('g-recaptcha-response'),
                                       private_key=captcha_private_key,
                                       remoteip=request.ip_addr)
@@ -145,14 +144,14 @@
                                                  error_dict=error_dict)
 
                 UserModel().create_registration(form_result)
-                h.flash(_('You have successfully registered with %s') % (c.site_name or 'Kallithea'),
+                webutils.flash(_('You have successfully registered with %s') % (c.site_name or 'Kallithea'),
                         category='success')
-                Session().commit()
+                meta.Session().commit()
                 raise HTTPFound(location=url('login_home'))
 
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('/register.html'),
+                    base.render('/register.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
@@ -163,12 +162,12 @@
                 # the fly can throw this exception signaling that there's issue
                 # with user creation, explanation should be provided in
                 # Exception itself
-                h.flash(e, 'error')
+                webutils.flash(e, 'error')
 
-        return render('/register.html')
+        return base.render('/register.html')
 
     def password_reset(self):
-        settings = Setting.get_app_settings()
+        settings = db.Setting.get_app_settings()
         captcha_private_key = settings.get('captcha_private_key')
         c.captcha_active = bool(captcha_private_key)
         c.captcha_public_key = settings.get('captcha_public_key')
@@ -178,7 +177,6 @@
             try:
                 form_result = password_reset_form.to_python(dict(request.POST))
                 if c.captcha_active:
-                    from kallithea.lib.recaptcha import submit
                     response = submit(request.POST.get('g-recaptcha-response'),
                                       private_key=captcha_private_key,
                                       remoteip=request.ip_addr)
@@ -189,20 +187,20 @@
                         raise formencode.Invalid(_msg, _value, None,
                                                  error_dict=error_dict)
                 redirect_link = UserModel().send_reset_password_email(form_result)
-                h.flash(_('A password reset confirmation code has been sent'),
+                webutils.flash(_('A password reset confirmation code has been sent'),
                             category='success')
                 raise HTTPFound(location=redirect_link)
 
             except formencode.Invalid as errors:
                 return htmlfill.render(
-                    render('/password_reset.html'),
+                    base.render('/password_reset.html'),
                     defaults=errors.value,
                     errors=errors.error_dict or {},
                     prefix_error=False,
                     encoding="UTF-8",
                     force_defaults=False)
 
-        return render('/password_reset.html')
+        return base.render('/password_reset.html')
 
     def password_reset_confirmation(self):
         # This controller handles both GET and POST requests, though we
@@ -215,14 +213,14 @@
         c.timestamp = request.params.get('timestamp') or ''
         c.token = request.params.get('token') or ''
         if not request.POST:
-            return render('/password_reset_confirmation.html')
+            return base.render('/password_reset_confirmation.html')
 
         form = PasswordResetConfirmationForm()()
         try:
             form_result = form.to_python(dict(request.POST))
         except formencode.Invalid as errors:
             return htmlfill.render(
-                render('/password_reset_confirmation.html'),
+                base.render('/password_reset_confirmation.html'),
                 defaults=errors.value,
                 errors=errors.error_dict or {},
                 prefix_error=False,
@@ -234,14 +232,14 @@
             form_result['token'],
         ):
             return htmlfill.render(
-                render('/password_reset_confirmation.html'),
+                base.render('/password_reset_confirmation.html'),
                 defaults=form_result,
                 errors={'token': _('Invalid password reset token')},
                 prefix_error=False,
                 encoding='UTF-8')
 
         UserModel().reset_password(form_result['email'], form_result['password'])
-        h.flash(_('Successfully updated password'), category='success')
+        webutils.flash(_('Successfully updated password'), category='success')
         raise HTTPFound(location=url('login_home'))
 
     def logout(self):
@@ -255,4 +253,4 @@
         Only intended for testing but might also be useful for other kinds
         of automation.
         """
-        return h.session_csrf_secret_token()
+        return webutils.session_csrf_secret_token()
--- a/kallithea/controllers/pullrequests.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/pullrequests.py	Thu May 27 21:27:37 2021 +0200
@@ -35,21 +35,20 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound
 
-from kallithea.config.routing import url
-from kallithea.controllers.changeset import _context_url, _ignorews_url, create_cs_pr_comment, delete_cs_pr_comment
-from kallithea.lib import diffs
-from kallithea.lib import helpers as h
+import kallithea.lib.helpers as h
+from kallithea.controllers import base
+from kallithea.controllers.changeset import create_cs_pr_comment, delete_cs_pr_comment
+from kallithea.lib import auth, diffs, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.graphmod import graph_data
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.comment import ChangesetCommentsModel
-from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User
 from kallithea.model.forms import PullRequestForm, PullRequestPostForm
-from kallithea.model.meta import Session
 from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel
 
 
@@ -59,21 +58,21 @@
 def _get_reviewer(user_id):
     """Look up user by ID and validate it as a potential reviewer."""
     try:
-        user = User.get(int(user_id))
+        user = db.User.get(int(user_id))
     except ValueError:
         user = None
 
     if user is None or user.is_default_user:
-        h.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
+        webutils.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
         raise HTTPBadRequest()
 
     return user
 
 
-class PullrequestsController(BaseRepoController):
+class PullrequestsController(base.BaseRepoController):
 
     def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
-        """return a structure with repo's interesting changesets, suitable for
+        """return a structure with scm repo's interesting changesets, suitable for
         the selectors in pullrequest.html
 
         rev: a revision that must be in the list somehow and selected by default
@@ -155,13 +154,14 @@
 
         # prio 4: tip revision
         if not selected:
-            if h.is_hg(repo):
+            if repo.alias == 'hg':
                 if tipbranch:
                     selected = 'branch:%s:%s' % (tipbranch, tiprev)
                 else:
                     selected = 'tag:null:' + repo.EMPTY_CHANGESET
                     tags.append((selected, 'null'))
             else:  # Git
+                assert repo.alias == 'git'
                 if not repo.branches:
                     selected = ''  # doesn't make sense, but better than nothing
                 elif 'master' in repo.branches:
@@ -183,9 +183,9 @@
             return False
 
         owner = request.authuser.user_id == pull_request.owner_id
-        reviewer = PullRequestReviewer.query() \
-            .filter(PullRequestReviewer.pull_request == pull_request) \
-            .filter(PullRequestReviewer.user_id == request.authuser.user_id) \
+        reviewer = db.PullRequestReviewer.query() \
+            .filter(db.PullRequestReviewer.pull_request == pull_request) \
+            .filter(db.PullRequestReviewer.user_id == request.authuser.user_id) \
             .count() != 0
 
         return request.authuser.admin or owner or reviewer
@@ -202,7 +202,7 @@
             url_params['closed'] = 1
         p = safe_int(request.GET.get('page'), 1)
 
-        q = PullRequest.query(include_closed=c.closed, sorted=True)
+        q = db.PullRequest.query(include_closed=c.closed, sorted=True)
         if c.from_:
             q = q.filter_by(org_repo=c.db_repo)
         else:
@@ -211,21 +211,21 @@
 
         c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100, **url_params)
 
-        return render('/pullrequests/pullrequest_show_all.html')
+        return base.render('/pullrequests/pullrequest_show_all.html')
 
     @LoginRequired()
     def show_my(self):
         c.closed = request.GET.get('closed') or ''
 
-        c.my_pull_requests = PullRequest.query(
+        c.my_pull_requests = db.PullRequest.query(
             include_closed=c.closed,
             sorted=True,
         ).filter_by(owner_id=request.authuser.user_id).all()
 
         c.participate_in_pull_requests = []
         c.participate_in_pull_requests_todo = []
-        done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED])
-        for pr in PullRequest.query(
+        done_status = set([db.ChangesetStatus.STATUS_APPROVED, db.ChangesetStatus.STATUS_REJECTED])
+        for pr in db.PullRequest.query(
             include_closed=c.closed,
             reviewer_id=request.authuser.user_id,
             sorted=True,
@@ -236,7 +236,7 @@
             else:
                 c.participate_in_pull_requests_todo.append(pr)
 
-        return render('/pullrequests/pullrequest_show_my.html')
+        return base.render('/pullrequests/pullrequest_show_my.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
@@ -246,7 +246,7 @@
         try:
             org_scm_instance.get_changeset()
         except EmptyRepositoryError as e:
-            h.flash(_('There are no changesets yet'),
+            webutils.flash(_('There are no changesets yet'),
                     category='warning')
             raise HTTPFound(location=url('summary_home', repo_name=org_repo.repo_name))
 
@@ -291,11 +291,11 @@
         for fork in org_repo.forks:
             c.a_repos.append((fork.repo_name, fork.repo_name))
 
-        return render('/pullrequests/pullrequest.html')
+        return base.render('/pullrequests/pullrequest.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def repo_info(self, repo_name):
         repo = c.db_repo
         refs, selected_ref = self._get_repo_refs(repo.scm_instance)
@@ -315,61 +315,61 @@
             log.error(traceback.format_exc())
             log.error(str(errors))
             msg = _('Error creating pull request: %s') % errors.msg
-            h.flash(msg, 'error')
+            webutils.flash(msg, 'error')
             raise HTTPBadRequest
 
         # heads up: org and other might seem backward here ...
         org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
-        org_repo = Repository.guess_instance(_form['org_repo'])
+        org_repo = db.Repository.guess_instance(_form['org_repo'])
 
         other_ref = _form['other_ref'] # will have symbolic name and head revision
-        other_repo = Repository.guess_instance(_form['other_repo'])
+        other_repo = db.Repository.guess_instance(_form['other_repo'])
 
         reviewers = []
 
         title = _form['pullrequest_title']
         description = _form['pullrequest_desc'].strip()
-        owner = User.get(request.authuser.user_id)
+        owner = db.User.get(request.authuser.user_id)
 
         try:
             cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers)
         except CreatePullRequestAction.ValidationError as e:
-            h.flash(e, category='error', logf=log.error)
+            webutils.flash(e, category='error', logf=log.error)
             raise HTTPNotFound
 
         try:
             pull_request = cmd.execute()
-            Session().commit()
+            meta.Session().commit()
         except Exception:
-            h.flash(_('Error occurred while creating pull request'),
+            webutils.flash(_('Error occurred while creating pull request'),
                     category='error')
             log.error(traceback.format_exc())
             raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name))
 
-        h.flash(_('Successfully opened new pull request'),
+        webutils.flash(_('Successfully opened new pull request'),
                 category='success')
         raise HTTPFound(location=pull_request.url())
 
     def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers):
-        owner = User.get(request.authuser.user_id)
+        owner = db.User.get(request.authuser.user_id)
         new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev)
         new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1])
         try:
             cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers)
         except CreatePullRequestAction.ValidationError as e:
-            h.flash(e, category='error', logf=log.error)
+            webutils.flash(e, category='error', logf=log.error)
             raise HTTPNotFound
 
         try:
             pull_request = cmd.execute()
-            Session().commit()
+            meta.Session().commit()
         except Exception:
-            h.flash(_('Error occurred while creating pull request'),
+            webutils.flash(_('Error occurred while creating pull request'),
                     category='error')
             log.error(traceback.format_exc())
             raise HTTPFound(location=old_pull_request.url())
 
-        h.flash(_('New pull request iteration created'),
+        webutils.flash(_('New pull request iteration created'),
                 category='success')
         raise HTTPFound(location=pull_request.url())
 
@@ -377,14 +377,14 @@
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
     def post(self, repo_name, pull_request_id):
-        pull_request = PullRequest.get_or_404(pull_request_id)
+        pull_request = db.PullRequest.get_or_404(pull_request_id)
         if pull_request.is_closed():
             raise HTTPForbidden()
         assert pull_request.other_repo.repo_name == repo_name
         # only owner or admin can update it
         owner = pull_request.owner_id == request.authuser.user_id
-        repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
-        if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner):
+        repo_admin = auth.HasRepoPermissionLevel('admin')(c.repo_name)
+        if not (auth.HasPermissionAny('hg.admin')() or repo_admin or owner):
             raise HTTPForbidden()
 
         _form = PullRequestPostForm()().to_python(request.POST)
@@ -397,11 +397,11 @@
         other_removed = old_reviewers - cur_reviewers
 
         if other_added:
-            h.flash(_('Meanwhile, the following reviewers have been added: %s') %
+            webutils.flash(_('Meanwhile, the following reviewers have been added: %s') %
                     (', '.join(u.username for u in other_added)),
                     category='warning')
         if other_removed:
-            h.flash(_('Meanwhile, the following reviewers have been removed: %s') %
+            webutils.flash(_('Meanwhile, the following reviewers have been removed: %s') %
                     (', '.join(u.username for u in other_removed)),
                     category='warning')
 
@@ -418,28 +418,28 @@
         old_description = pull_request.description
         pull_request.title = _form['pullrequest_title']
         pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
-        pull_request.owner = User.get_by_username(_form['owner'])
-        user = User.get(request.authuser.user_id)
+        pull_request.owner = db.User.get_by_username(_form['owner'])
+        user = db.User.get(request.authuser.user_id)
 
         PullRequestModel().mention_from_description(user, pull_request, old_description)
         PullRequestModel().add_reviewers(user, pull_request, added_reviewers)
         PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers)
 
-        Session().commit()
-        h.flash(_('Pull request updated'), category='success')
+        meta.Session().commit()
+        webutils.flash(_('Pull request updated'), category='success')
 
         raise HTTPFound(location=pull_request.url())
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def delete(self, repo_name, pull_request_id):
-        pull_request = PullRequest.get_or_404(pull_request_id)
+        pull_request = db.PullRequest.get_or_404(pull_request_id)
         # only owner can delete it !
         if pull_request.owner_id == request.authuser.user_id:
             PullRequestModel().delete(pull_request)
-            Session().commit()
-            h.flash(_('Successfully deleted pull request'),
+            meta.Session().commit()
+            webutils.flash(_('Successfully deleted pull request'),
                     category='success')
             raise HTTPFound(location=url('my_pullrequests'))
         raise HTTPForbidden()
@@ -447,7 +447,7 @@
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def show(self, repo_name, pull_request_id, extra=None):
-        c.pull_request = PullRequest.get_or_404(pull_request_id)
+        c.pull_request = db.PullRequest.get_or_404(pull_request_id)
         c.allowed_to_change_status = self._is_allowed_to_change_status(c.pull_request)
         cc_model = ChangesetCommentsModel()
         cs_model = ChangesetStatusModel()
@@ -475,7 +475,7 @@
                 c.cs_ranges.append(org_scm_instance.get_changeset(x))
             except ChangesetDoesNotExistError:
                 c.cs_ranges = []
-                h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name),
+                webutils.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name),
                     'error')
                 break
         c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ...
@@ -553,7 +553,7 @@
                         show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name))
                         show.add(revs[0]) # make sure graph shows this so we can see how they relate
                         c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name,
-                            h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id))
+                            org_scm_instance.get_changeset(max(brevs)).short_id)
 
                     avail_show = sorted(show, reverse=True)
 
@@ -571,10 +571,8 @@
         c.cs_comments = c.cs_repo.get_comments(raw_ids)
         c.cs_statuses = c.cs_repo.statuses(raw_ids)
 
-        ignore_whitespace = request.GET.get('ignorews') == '1'
-        line_context = safe_int(request.GET.get('context'), 3)
-        c.ignorews_url = _ignorews_url
-        c.context_url = _context_url
+        ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
+        diff_context_size = h.get_diff_context_size(request.GET)
         fulldiff = request.GET.get('fulldiff')
         diff_limit = None if fulldiff else self.cut_off_limit
 
@@ -583,7 +581,7 @@
                   c.a_rev, c.cs_rev, org_scm_instance.path)
         try:
             raw_diff = diffs.get_diff(org_scm_instance, rev1=c.a_rev, rev2=c.cs_rev,
-                                      ignore_whitespace=ignore_whitespace, context=line_context)
+                                      ignore_whitespace=ignore_whitespace_diff, context=diff_context_size)
         except ChangesetDoesNotExistError:
             raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found."))
         diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
@@ -598,7 +596,7 @@
             c.lines_deleted += st['deleted']
             filename = f['filename']
             fid = h.FID('', filename)
-            html_diff = diffs.as_html(enable_comments=True, parsed_lines=[f])
+            html_diff = diffs.as_html(parsed_lines=[f])
             c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st))
 
         # inline comments
@@ -618,23 +616,23 @@
          c.pull_request_pending_reviewers,
          c.current_voting_result,
          ) = cs_model.calculate_pull_request_result(c.pull_request)
-        c.changeset_statuses = ChangesetStatus.STATUSES
+        c.changeset_statuses = db.ChangesetStatus.STATUSES
 
         c.is_ajax_preview = False
         c.ancestors = None # [c.a_rev] ... but that is shown in an other way
-        return render('/pullrequests/pullrequest_show.html')
+        return base.render('/pullrequests/pullrequest_show.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def comment(self, repo_name, pull_request_id):
-        pull_request = PullRequest.get_or_404(pull_request_id)
+        pull_request = db.PullRequest.get_or_404(pull_request_id)
         allowed_to_change_status = self._is_allowed_to_change_status(pull_request)
         return create_cs_pr_comment(repo_name, pull_request=pull_request,
                 allowed_to_change_status=allowed_to_change_status)
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def delete_comment(self, repo_name, comment_id):
         return delete_cs_pr_comment(repo_name, comment_id)
--- a/kallithea/controllers/root.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/root.py	Thu May 27 21:27:37 2021 +0200
@@ -14,9 +14,9 @@
 from tg import config
 from tgext.routes import RoutedController
 
-from kallithea.config.routing import make_map
+from kallithea.controllers import base
 from kallithea.controllers.error import ErrorController
-from kallithea.lib.base import BaseController
+from kallithea.controllers.routing import make_map
 
 
 # This is the main Kallithea entry point; TurboGears will forward all requests
@@ -26,7 +26,7 @@
 # The mapper is configured using routes defined in routing.py.  This use of the
 # 'mapper' attribute is a feature of tgext.routes, which is activated by
 # inheriting from its RoutedController class.
-class RootController(RoutedController, BaseController):
+class RootController(RoutedController, base.BaseController):
 
     def __init__(self):
         self.mapper = make_map(config)
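
The comment above notes that the 'mapper' attribute is what tgext.routes' RoutedController dispatches on. A minimal, hypothetical sketch of that pattern (illustrative only, not part of this changeset):

    # Illustrative sketch: a RoutedController exposing a routes Mapper.
    from routes import Mapper
    from tgext.routes import RoutedController

    class TinyRootController(RoutedController):
        def __init__(self):
            self.mapper = Mapper()
            # requests to '/' are dispatched to the 'home' controller's 'index' action
            self.mapper.connect('home', '/', controller='home', action='index')
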
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/controllers/routing.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,773 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+Routes configuration
+
+The more specific and detailed routes should be defined first so that they
+take precedence over the more generic routes. For more information,
+refer to the routes manual at http://routes.groovie.org/docs/
+"""
+
+import routes
+
+import kallithea
+from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
+from kallithea.lib.utils2 import safe_str
+
+
+class Mapper(routes.Mapper):
+    """
+    Subclassed Mapper with routematch patched to decode the "fake bytes" str url
+    to a *real* unicode str before applying matches and invoking controller methods.
+    """
+
+    def routematch(self, url=None, environ=None):
+        """
+        routematch that also decodes the url from "fake bytes" to a real unicode
+        string before matching and invoking controllers.
+        """
+        # Process url like get_path_info does ... but PATH_INFO has already
+        # been retrieved from environ and passed in as 'url', so just use
+        # that instead.
+        url = safe_str(url.encode('latin1'))
+        return super().routematch(url=url, environ=environ)
+
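
The decoding above follows the WSGI (PEP 3333) convention: PATH_INFO arrives as a latin1-decoded "fake bytes" str, so encoding it back to latin1 recovers the raw bytes, which safe_str then turns into a real unicode str. A minimal standalone sketch, assuming safe_str decodes byte input as UTF-8 with replacement (illustrative only):

    # The round-trip that routematch applies to 'url'.
    def decode_wsgi_path(url: str) -> str:
        raw = url.encode('latin1')             # recover the original request bytes
        return raw.decode('utf-8', 'replace')  # assumed safe_str behaviour

    # A client requesting '/repo/æ' sends the UTF-8 bytes b'/repo/\xc3\xa6';
    # WSGI exposes them as the latin1 str '/repo/Ã¦', and the round-trip
    # restores the real path:
    assert decode_wsgi_path('/repo/Ã¦') == '/repo/æ'
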
+
+def make_map(config):
+    """Create, configure and return the routes Mapper"""
+    rmap = Mapper(directory=config['paths']['controllers'],
+                  always_scan=config['debug'])
+    rmap.minimization = False
+    rmap.explicit = False
+
+    def check_repo(environ, match_dict):
+        """
+        Check for a valid repository, for proper 404 handling.
+        As a side effect, normalize f_path in match_dict.
+        """
+        if match_dict.get('f_path'):
+            # fix for multiple initial slashes, which cause errors
+            match_dict['f_path'] = match_dict['f_path'].lstrip('/')
+
+        return is_valid_repo(match_dict['repo_name'], config['base_path'])
+
+    def check_group(environ, match_dict):
+        """
+        Check for a valid repository group, for proper 404 handling.
+
+        :param environ:
+        :param match_dict:
+        """
+        repo_group_name = match_dict.get('group_name')
+        return is_valid_repo_group(repo_group_name, config['base_path'])
+
+    def check_group_skip_path(environ, match_dict):
+        """
+        Check for a valid repository group, for proper 404 handling, but skip
+        the verification that the path exists.
+
+        :param environ:
+        :param match_dict:
+        """
+        repo_group_name = match_dict.get('group_name')
+        return is_valid_repo_group(repo_group_name, config['base_path'],
+                                   skip_path_check=True)
+
+    def check_user_group(environ, match_dict):
+        """
+        Check for a valid user group, for proper 404 handling.
+
+        :param environ:
+        :param match_dict:
+        """
+        return True
+
+    def check_int(environ, match_dict):
+        return match_dict.get('id').isdigit()
+
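
The helpers above are used as routes 'conditions' functions below: each one receives (environ, match_dict), and a falsy return value makes the route not match, so an unknown repository or group falls through and ultimately yields a 404. A small illustrative sketch of the same mechanism (the route and condition are hypothetical):

    # A condition function can veto a route match.
    def check_answer(environ, match_dict):
        # only match when the captured id is exactly '42'
        return match_dict.get('id') == '42'

    # rmap.connect('answer', '/answers/{id}', controller='answers',
    #              conditions=dict(function=check_answer, method=['GET']))
    # GET /answers/41 does not match this route and ends up as a 404.
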
+    #==========================================================================
+    # CUSTOM ROUTES HERE
+    #==========================================================================
+
+    # MAIN PAGE
+    rmap.connect('home', '/', controller='home')
+    rmap.connect('about', '/about', controller='home', action='about')
+    rmap.redirect('/favicon.ico', '/images/favicon.ico')
+    rmap.connect('repo_switcher_data', '/_repos', controller='home',
+                 action='repo_switcher_data')
+    rmap.connect('users_and_groups_data', '/_users_and_groups', controller='home',
+                 action='users_and_groups_data')
+
+    rmap.connect('rst_help',
+                 "http://docutils.sourceforge.net/docs/user/rst/quickref.html",
+                 _static=True)
+    rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True)
+    rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True)
+
+    # ADMIN REPOSITORY ROUTES
+    ADMIN_PREFIX = kallithea.ADMIN_PREFIX
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/repos') as m:
+        m.connect("repos", "/repos",
+                  action="create", conditions=dict(method=["POST"]))
+        m.connect("repos", "/repos",
+                  conditions=dict(method=["GET"]))
+        m.connect("new_repo", "/create_repository",
+                  action="create_repository", conditions=dict(method=["GET"]))
+        m.connect("update_repo", "/repos/{repo_name:.*?}",
+                  action="update", conditions=dict(method=["POST"],
+                  function=check_repo))
+        m.connect("delete_repo", "/repos/{repo_name:.*?}/delete",
+                  action="delete", conditions=dict(method=["POST"]))
+
+    # ADMIN REPOSITORY GROUPS ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/repo_groups') as m:
+        m.connect("repos_groups", "/repo_groups",
+                  action="create", conditions=dict(method=["POST"]))
+        m.connect("repos_groups", "/repo_groups",
+                  conditions=dict(method=["GET"]))
+        m.connect("new_repos_group", "/repo_groups/new",
+                  action="new", conditions=dict(method=["GET"]))
+        m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
+                  action="update", conditions=dict(method=["POST"],
+                                                   function=check_group))
+
+        m.connect("repos_group", "/repo_groups/{group_name:.*?}",
+                  action="show", conditions=dict(method=["GET"],
+                                                 function=check_group))
+
+        # EXTRAS REPO GROUP ROUTES
+        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
+                  action="edit",
+                  conditions=dict(method=["GET"], function=check_group))
+
+        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
+                  action="edit_repo_group_advanced",
+                  conditions=dict(method=["GET"], function=check_group))
+
+        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
+                  action="edit_repo_group_perms",
+                  conditions=dict(method=["GET"], function=check_group))
+        m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions",
+                  action="update_perms",
+                  conditions=dict(method=["POST"], function=check_group))
+        m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete",
+                  action="delete_perms",
+                  conditions=dict(method=["POST"], function=check_group))
+
+        m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete",
+                  action="delete", conditions=dict(method=["POST"],
+                                                   function=check_group_skip_path))
+
+    # ADMIN USER ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/users') as m:
+        m.connect("new_user", "/users/new",
+                  action="create", conditions=dict(method=["POST"]))
+        m.connect("users", "/users",
+                  conditions=dict(method=["GET"]))
+        m.connect("formatted_users", "/users.{format}",
+                  conditions=dict(method=["GET"]))
+        m.connect("new_user", "/users/new",
+                  action="new", conditions=dict(method=["GET"]))
+        m.connect("update_user", "/users/{id}",
+                  action="update", conditions=dict(method=["POST"]))
+        m.connect("delete_user", "/users/{id}/delete",
+                  action="delete", conditions=dict(method=["POST"]))
+        m.connect("edit_user", "/users/{id}/edit",
+                  action="edit", conditions=dict(method=["GET"]))
+
+        # EXTRAS USER ROUTES
+        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
+                  action="edit_advanced", conditions=dict(method=["GET"]))
+
+        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
+                  action="edit_api_keys", conditions=dict(method=["GET"]))
+        m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys",
+                  action="add_api_key", conditions=dict(method=["POST"]))
+        m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete",
+                  action="delete_api_key", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys",
+                  action="edit_ssh_keys", conditions=dict(method=["GET"]))
+        m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys",
+                  action="ssh_keys_add", conditions=dict(method=["POST"]))
+        m.connect("edit_user_ssh_keys_delete", "/users/{id}/edit/ssh_keys/delete",
+                  action="ssh_keys_delete", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
+                  action="edit_perms", conditions=dict(method=["GET"]))
+        m.connect("edit_user_perms_update", "/users/{id}/edit/permissions",
+                  action="update_perms", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_emails", "/users/{id}/edit/emails",
+                  action="edit_emails", conditions=dict(method=["GET"]))
+        m.connect("edit_user_emails_update", "/users/{id}/edit/emails",
+                  action="add_email", conditions=dict(method=["POST"]))
+        m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete",
+                  action="delete_email", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_ips", "/users/{id}/edit/ips",
+                  action="edit_ips", conditions=dict(method=["GET"]))
+        m.connect("edit_user_ips_update", "/users/{id}/edit/ips",
+                  action="add_ip", conditions=dict(method=["POST"]))
+        m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete",
+                  action="delete_ip", conditions=dict(method=["POST"]))
+
+    # ADMIN USER GROUPS REST ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/user_groups') as m:
+        m.connect("users_groups", "/user_groups",
+                  action="create", conditions=dict(method=["POST"]))
+        m.connect("users_groups", "/user_groups",
+                  conditions=dict(method=["GET"]))
+        m.connect("new_users_group", "/user_groups/new",
+                  action="new", conditions=dict(method=["GET"]))
+        m.connect("update_users_group", "/user_groups/{id}",
+                  action="update", conditions=dict(method=["POST"]))
+        m.connect("delete_users_group", "/user_groups/{id}/delete",
+                  action="delete", conditions=dict(method=["POST"]))
+        m.connect("edit_users_group", "/user_groups/{id}/edit",
+                  action="edit", conditions=dict(method=["GET"]),
+                  function=check_user_group)
+
+        # EXTRAS USER GROUP ROUTES
+        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
+                  action="edit_default_perms", conditions=dict(method=["GET"]))
+        m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms",
+                  action="update_default_perms", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
+                  action="edit_perms", conditions=dict(method=["GET"]))
+        m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms",
+                  action="update_perms", conditions=dict(method=["POST"]))
+        m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete",
+                  action="delete_perms", conditions=dict(method=["POST"]))
+
+        m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced",
+                  action="edit_advanced", conditions=dict(method=["GET"]))
+
+        m.connect("edit_user_group_members", "/user_groups/{id}/edit/members",
+                  action="edit_members", conditions=dict(method=["GET"]))
+
+    # ADMIN PERMISSIONS ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/permissions') as m:
+        m.connect("admin_permissions", "/permissions",
+                  action="permission_globals", conditions=dict(method=["POST"]))
+        m.connect("admin_permissions", "/permissions",
+                  action="permission_globals", conditions=dict(method=["GET"]))
+
+        m.connect("admin_permissions_ips", "/permissions/ips",
+                  action="permission_ips", conditions=dict(method=["GET"]))
+
+        m.connect("admin_permissions_perms", "/permissions/perms",
+                  action="permission_perms", conditions=dict(method=["GET"]))
+
+    # ADMIN DEFAULTS ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/defaults') as m:
+        m.connect('defaults', '/defaults')
+        m.connect('defaults_update', 'defaults/{id}/update',
+                  action="update", conditions=dict(method=["POST"]))
+
+    # ADMIN AUTH SETTINGS
+    rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX,
+                 controller='admin/auth_settings', action='auth_settings',
+                 conditions=dict(method=["POST"]))
+    rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX,
+                 controller='admin/auth_settings')
+
+    # ADMIN SETTINGS ROUTES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/settings') as m:
+        m.connect("admin_settings", "/settings",
+                  action="settings_vcs", conditions=dict(method=["POST"]))
+        m.connect("admin_settings", "/settings",
+                  action="settings_vcs", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_mapping", "/settings/mapping",
+                  action="settings_mapping", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_mapping", "/settings/mapping",
+                  action="settings_mapping", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_global", "/settings/global",
+                  action="settings_global", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_global", "/settings/global",
+                  action="settings_global", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_visual", "/settings/visual",
+                  action="settings_visual", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_visual", "/settings/visual",
+                  action="settings_visual", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_email", "/settings/email",
+                  action="settings_email", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_email", "/settings/email",
+                  action="settings_email", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_hooks", "/settings/hooks",
+                  action="settings_hooks", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_hooks_delete", "/settings/hooks/delete",
+                  action="settings_hooks", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_hooks", "/settings/hooks",
+                  action="settings_hooks", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_search", "/settings/search",
+                  action="settings_search", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_search", "/settings/search",
+                  action="settings_search", conditions=dict(method=["GET"]))
+
+        m.connect("admin_settings_system", "/settings/system",
+                  action="settings_system", conditions=dict(method=["POST"]))
+        m.connect("admin_settings_system", "/settings/system",
+                  action="settings_system", conditions=dict(method=["GET"]))
+
+    # ADMIN MY ACCOUNT
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/my_account') as m:
+
+        m.connect("my_account", "/my_account",
+                  action="my_account", conditions=dict(method=["GET"]))
+        m.connect("my_account", "/my_account",
+                  action="my_account", conditions=dict(method=["POST"]))
+
+        m.connect("my_account_password", "/my_account/password",
+                  action="my_account_password", conditions=dict(method=["GET"]))
+        m.connect("my_account_password", "/my_account/password",
+                  action="my_account_password", conditions=dict(method=["POST"]))
+
+        m.connect("my_account_repos", "/my_account/repos",
+                  action="my_account_repos", conditions=dict(method=["GET"]))
+
+        m.connect("my_account_watched", "/my_account/watched",
+                  action="my_account_watched", conditions=dict(method=["GET"]))
+
+        m.connect("my_account_perms", "/my_account/perms",
+                  action="my_account_perms", conditions=dict(method=["GET"]))
+
+        m.connect("my_account_emails", "/my_account/emails",
+                  action="my_account_emails", conditions=dict(method=["GET"]))
+        m.connect("my_account_emails", "/my_account/emails",
+                  action="my_account_emails_add", conditions=dict(method=["POST"]))
+        m.connect("my_account_emails_delete", "/my_account/emails/delete",
+                  action="my_account_emails_delete", conditions=dict(method=["POST"]))
+
+        m.connect("my_account_api_keys", "/my_account/api_keys",
+                  action="my_account_api_keys", conditions=dict(method=["GET"]))
+        m.connect("my_account_api_keys", "/my_account/api_keys",
+                  action="my_account_api_keys_add", conditions=dict(method=["POST"]))
+        m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete",
+                  action="my_account_api_keys_delete", conditions=dict(method=["POST"]))
+
+        m.connect("my_account_ssh_keys", "/my_account/ssh_keys",
+                  action="my_account_ssh_keys", conditions=dict(method=["GET"]))
+        m.connect("my_account_ssh_keys", "/my_account/ssh_keys",
+                  action="my_account_ssh_keys_add", conditions=dict(method=["POST"]))
+        m.connect("my_account_ssh_keys_delete", "/my_account/ssh_keys/delete",
+                  action="my_account_ssh_keys_delete", conditions=dict(method=["POST"]))
+
+    # ADMIN GIST
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/gists') as m:
+        m.connect("gists", "/gists",
+                  action="create", conditions=dict(method=["POST"]))
+        m.connect("gists", "/gists",
+                  conditions=dict(method=["GET"]))
+        m.connect("new_gist", "/gists/new",
+                  action="new", conditions=dict(method=["GET"]))
+
+        m.connect("gist_delete", "/gists/{gist_id}/delete",
+                  action="delete", conditions=dict(method=["POST"]))
+        m.connect("edit_gist", "/gists/{gist_id}/edit",
+                  action="edit", conditions=dict(method=["GET", "POST"]))
+        m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision",
+                  action="check_revision", conditions=dict(method=["POST"]))
+
+        m.connect("gist", "/gists/{gist_id}",
+                  action="show", conditions=dict(method=["GET"]))
+        m.connect("gist_rev", "/gists/{gist_id}/{revision}",
+                  revision="tip",
+                  action="show", conditions=dict(method=["GET"]))
+        m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}",
+                  revision="tip",
+                  action="show", conditions=dict(method=["GET"]))
+        m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}",
+                  revision='tip',
+                  action="show", conditions=dict(method=["GET"]))
+
+    # ADMIN MAIN PAGES
+    with rmap.submapper(path_prefix=ADMIN_PREFIX,
+                        controller='admin/admin') as m:
+        m.connect('admin_home', '')
+        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. _-]*}',
+                  action='add_repo')
+    #==========================================================================
+    # API V2
+    #==========================================================================
+    with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api',
+                        action='_dispatch') as m:
+        m.connect('api', '/api')
+
+    # USER JOURNAL
+    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
+                 controller='journal')
+    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
+                 controller='journal', action='journal_rss')
+    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
+                 controller='journal', action='journal_atom')
+
+    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
+                 controller='journal', action="public_journal")
+
+    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
+                 controller='journal', action="public_journal_rss")
+
+    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
+                 controller='journal', action="public_journal_rss")
+
+    rmap.connect('public_journal_atom',
+                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
+                 action="public_journal_atom")
+
+    rmap.connect('public_journal_atom_old',
+                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
+                 action="public_journal_atom")
+
+    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
+                 controller='journal', action='toggle_following',
+                 conditions=dict(method=["POST"]))
+
+    # SEARCH
+    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
+    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
+                 controller='search',
+                 conditions=dict(function=check_repo))
+    rmap.connect('search_repo', '/{repo_name:.*?}/search',
+                 controller='search',
+                 conditions=dict(function=check_repo),
+                 )
+
+    # LOGIN/LOGOUT/REGISTER/SIGN IN
+    rmap.connect('session_csrf_secret_token', '%s/session_csrf_secret_token' % ADMIN_PREFIX, controller='login', action='session_csrf_secret_token')
+    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
+    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
+                 action='logout')
+
+    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
+                 action='register')
+
+    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
+                 controller='login', action='password_reset')
+
+    rmap.connect('reset_password_confirmation',
+                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
+                 controller='login', action='password_reset_confirmation')
+
+    # FEEDS
+    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
+                controller='feed', action='rss',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
+                controller='feed', action='atom',
+                conditions=dict(function=check_repo))
+
+    #==========================================================================
+    # REPOSITORY ROUTES
+    #==========================================================================
+    rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
+                controller='admin/repos', action='repo_creating')
+    rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating',
+                controller='admin/repos', action='repo_check')
+
+    rmap.connect('summary_home', '/{repo_name:.*?}',
+                controller='summary',
+                conditions=dict(function=check_repo))
+
+    # must be here for proper group/repo catching
+    rmap.connect('repos_group_home', '/{group_name:.*}',
+                controller='admin/repo_groups', action="show_by_name",
+                conditions=dict(function=check_group))
+    rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics',
+                controller='summary', action='statistics',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('repo_size', '/{repo_name:.*?}/repo_size',
+                controller='summary', action='repo_size',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data',
+                 controller='home', action='repo_refs_data')
+
+    rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}',
+                controller='changeset', revision='tip',
+                conditions=dict(function=check_repo))
+    rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}',
+                controller='changeset', revision='tip', action="changeset_children",
+                conditions=dict(function=check_repo))
+    rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}',
+                controller='changeset', revision='tip', action="changeset_parents",
+                conditions=dict(function=check_repo))
+
+    # repo edit options
+    rmap.connect("edit_repo", "/{repo_name:.*?}/settings",
+                 controller='admin/repos', action="edit",
+                 conditions=dict(method=["GET"], function=check_repo))
+
+    rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions",
+                 controller='admin/repos', action="edit_permissions",
+                 conditions=dict(method=["GET"], function=check_repo))
+    rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions",
+                 controller='admin/repos', action="edit_permissions_update",
+                 conditions=dict(method=["POST"], function=check_repo))
+    rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete",
+                 controller='admin/repos', action="edit_permissions_revoke",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields",
+                 controller='admin/repos', action="edit_fields",
+                 conditions=dict(method=["GET"], function=check_repo))
+    rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new",
+                 controller='admin/repos', action="create_repo_field",
+                 conditions=dict(method=["POST"], function=check_repo))
+    rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete",
+                 controller='admin/repos', action="delete_repo_field",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced",
+                 controller='admin/repos', action="edit_advanced",
+                 conditions=dict(method=["GET"], function=check_repo))
+
+    rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal",
+                 controller='admin/repos', action="edit_advanced_journal",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork",
+                 controller='admin/repos', action="edit_advanced_fork",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
+                 controller='admin/repos', action="edit_remote",
+                 conditions=dict(method=["GET"], function=check_repo))
+    rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote",
+                 controller='admin/repos', action="edit_remote",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
+                 controller='admin/repos', action="edit_statistics",
+                 conditions=dict(method=["GET"], function=check_repo))
+    rmap.connect("edit_repo_statistics_update", "/{repo_name:.*?}/settings/statistics",
+                 controller='admin/repos', action="edit_statistics",
+                 conditions=dict(method=["POST"], function=check_repo))
+
+    # old URL kept working for backward compatibility
+    rmap.connect('raw_changeset_home_depraced',
+                 '/{repo_name:.*?}/raw-changeset/{revision}',
+                 controller='changeset', action='changeset_raw',
+                 revision='tip', conditions=dict(function=check_repo))
+
+    ## new URLs
+    rmap.connect('changeset_raw_home',
+                 '/{repo_name:.*?}/changeset-diff/{revision}',
+                 controller='changeset', action='changeset_raw',
+                 revision='tip', conditions=dict(function=check_repo))
+
+    rmap.connect('changeset_patch_home',
+                 '/{repo_name:.*?}/changeset-patch/{revision}',
+                 controller='changeset', action='changeset_patch',
+                 revision='tip', conditions=dict(function=check_repo))
+
+    rmap.connect('changeset_download_home',
+                 '/{repo_name:.*?}/changeset-download/{revision}',
+                 controller='changeset', action='changeset_download',
+                 revision='tip', conditions=dict(function=check_repo))
+
+    rmap.connect('changeset_comment',
+                 '/{repo_name:.*?}/changeset-comment/{revision}',
+                controller='changeset', revision='tip', action='comment',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('changeset_comment_delete',
+                 '/{repo_name:.*?}/changeset-comment/{comment_id}/delete',
+                controller='changeset', action='delete_comment',
+                conditions=dict(function=check_repo, method=["POST"]))
+
+    rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}',
+                 controller='changeset', action='changeset_info')
+
+    rmap.connect('compare_home',
+                 '/{repo_name:.*?}/compare',
+                 controller='compare',
+                 conditions=dict(function=check_repo))
+
+    rmap.connect('compare_url',
+                 '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}',
+                 controller='compare', action='compare',
+                 conditions=dict(function=check_repo),
+                 requirements=dict(
+                            org_ref_type='(branch|book|tag|rev|__other_ref_type__)',
+                            other_ref_type='(branch|book|tag|rev|__org_ref_type__)')
+                 )
+
+    rmap.connect('pullrequest_home',
+                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
+                 conditions=dict(function=check_repo,
+                                                 method=["GET"]))
+
+    rmap.connect('pullrequest_repo_info',
+                 '/{repo_name:.*?}/pull-request-repo-info',
+                 controller='pullrequests', action='repo_info',
+                 conditions=dict(function=check_repo, method=["GET"]))
+
+    rmap.connect('pullrequest',
+                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
+                 action='create', conditions=dict(function=check_repo,
+                                                  method=["POST"]))
+
+    rmap.connect('pullrequest_show',
+                 '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='',
+                 controller='pullrequests',
+                 action='show', conditions=dict(function=check_repo,
+                                                method=["GET"]))
+    rmap.connect('pullrequest_post',
+                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
+                 controller='pullrequests',
+                 action='post', conditions=dict(function=check_repo,
+                                                method=["POST"]))
+    rmap.connect('pullrequest_delete',
+                 '/{repo_name:.*?}/pull-request/{pull_request_id}/delete',
+                 controller='pullrequests',
+                 action='delete', conditions=dict(function=check_repo,
+                                                  method=["POST"]))
+
+    rmap.connect('pullrequest_show_all',
+                 '/{repo_name:.*?}/pull-request',
+                 controller='pullrequests',
+                 action='show_all',
+                 conditions=dict(function=check_repo, method=["GET"]))
+
+    rmap.connect('my_pullrequests',
+                 '/my_pullrequests',
+                 controller='pullrequests',
+                 action='show_my', conditions=dict(method=["GET"]))
+
+    rmap.connect('pullrequest_comment',
+                 '/{repo_name:.*?}/pull-request-comment/{pull_request_id}',
+                 controller='pullrequests',
+                 action='comment',
+                 conditions=dict(function=check_repo, method=["POST"]))
+
+    rmap.connect('pullrequest_comment_delete',
+                 '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete',
+                controller='pullrequests', action='delete_comment',
+                conditions=dict(function=check_repo, method=["POST"]))
+
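+    ## repository summary, changelog and file browsing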
+    rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary',
+                controller='summary', conditions=dict(function=check_repo))
+
+    rmap.connect('changelog_home', '/{repo_name:.*?}/changelog',
+                controller='changelog', conditions=dict(function=check_repo))
+
+    rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
+                controller='changelog',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
+                controller='changelog', action='changelog_details',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}',
+                controller='files', revision='tip', f_path='',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}',
+                controller='files', revision='tip', f_path='',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_history_home',
+                 '/{repo_name:.*?}/history/{revision}/{f_path:.*}',
+                 controller='files', action='history', revision='tip', f_path='',
+                 conditions=dict(function=check_repo))
+
+    rmap.connect('files_authors_home',
+                 '/{repo_name:.*?}/authors/{revision}/{f_path:.*}',
+                 controller='files', action='authors', revision='tip', f_path='',
+                 conditions=dict(function=check_repo))
+
+    rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}',
+                controller='files', action='diff', revision='tip', f_path='',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}',
+                controller='files', action='diff_2way', revision='tip', f_path='',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_rawfile_home',
+                 '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}',
+                 controller='files', action='rawfile', revision='tip',
+                 f_path='', conditions=dict(function=check_repo))
+
+    rmap.connect('files_raw_home',
+                 '/{repo_name:.*?}/raw/{revision}/{f_path:.*}',
+                 controller='files', action='raw', revision='tip', f_path='',
+                 conditions=dict(function=check_repo))
+
+    rmap.connect('files_annotate_home',
+                 '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
+                 controller='files', revision='tip',
+                 f_path='', annotate='1', conditions=dict(function=check_repo))
+
+    rmap.connect('files_edit_home',
+                 '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
+                 controller='files', action='edit', revision='tip',
+                 f_path='', conditions=dict(function=check_repo))
+
+    rmap.connect('files_add_home',
+                 '/{repo_name:.*?}/add/{revision}/{f_path:.*}',
+                 controller='files', action='add', revision='tip',
+                 f_path='', conditions=dict(function=check_repo))
+
+    rmap.connect('files_delete_home',
+                 '/{repo_name:.*?}/delete/{revision}/{f_path:.*}',
+                 controller='files', action='delete', revision='tip',
+                 f_path='', conditions=dict(function=check_repo))
+
+    rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}',
+                controller='files', action='archivefile',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('files_nodelist_home',
+                 '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}',
+                controller='files', action='nodelist',
+                conditions=dict(function=check_repo))
+
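+    ## forks and followers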
+    rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork',
+                controller='forks', action='fork_create',
+                conditions=dict(function=check_repo, method=["POST"]))
+
+    rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork',
+                controller='forks', action='fork',
+                conditions=dict(function=check_repo))
+
+    rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks',
+                 controller='forks', action='forks',
+                 conditions=dict(function=check_repo))
+
+    rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers',
+                 controller='followers', action='followers',
+                 conditions=dict(function=check_repo))
+
+    return rmap
--- a/kallithea/controllers/search.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/search.py	Thu May 27 21:27:37 2021 +0200
@@ -35,8 +35,8 @@
 from whoosh.qparser import QueryParser, QueryParserError
 from whoosh.query import Phrase, Prefix
 
+from kallithea.controllers import base
 from kallithea.lib.auth import LoginRequired
-from kallithea.lib.base import BaseRepoController, render
 from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA, WhooshResultWrapper
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int
@@ -46,7 +46,7 @@
 log = logging.getLogger(__name__)
 
 
-class SearchController(BaseRepoController):
+class SearchController(base.BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
     def index(self, repo_name=None):
@@ -139,4 +139,4 @@
                 c.runtime = _('An error occurred during search operation.')
 
         # Return a rendered template
-        return render('/search/search.html')
+        return base.render('/search/search.html')
--- a/kallithea/controllers/summary.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/controllers/summary.py	Thu May 27 21:27:37 2021 +0200
@@ -38,19 +38,17 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPBadRequest
 
-import kallithea.lib.helpers as h
-from kallithea.config.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP
-from kallithea.lib import ext_json
+from kallithea.controllers import base
+from kallithea.lib import ext_json, webutils
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
-from kallithea.lib.base import BaseRepoController, jsonify, render
-from kallithea.lib.celerylib.tasks import get_commits_stats
+from kallithea.lib.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP
 from kallithea.lib.markup_renderer import MarkupRenderer
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError
 from kallithea.lib.vcs.nodes import FileNode
-from kallithea.model.db import Statistics
+from kallithea.model import async_tasks, db
 
 
 log = logging.getLogger(__name__)
@@ -60,7 +58,7 @@
                            key=lambda y:y[0][1] + y[1][1])]
 
 
-class SummaryController(BaseRepoController):
+class SummaryController(base.BaseRepoController):
 
     def __get_readme_data(self, db_repo):
         repo_name = db_repo.repo_name
@@ -108,7 +106,7 @@
         try:
             collection = c.db_repo_scm_instance.get_changesets(reverse=True)
         except EmptyRepositoryError as e:
-            h.flash(e, category='warning')
+            webutils.flash(e, category='warning')
             collection = []
         c.cs_pagination = Page(collection, page=p, items_per_page=size)
         page_revisions = [x.raw_id for x in list(c.cs_pagination)]
@@ -131,8 +129,8 @@
         else:
             c.show_stats = False
 
-        stats = Statistics.query() \
-            .filter(Statistics.repository == c.db_repo) \
+        stats = db.Statistics.query() \
+            .filter(db.Statistics.repository == c.db_repo) \
             .scalar()
 
         c.stats_percentage = 0
@@ -150,11 +148,11 @@
         c.enable_downloads = c.db_repo.enable_downloads
         c.readme_data, c.readme_file = \
             self.__get_readme_data(c.db_repo)
-        return render('summary/summary.html')
+        return base.render('summary/summary.html')
 
     @LoginRequired()
     @HasRepoPermissionLevelDecorator('read')
-    @jsonify
+    @base.jsonify
     def repo_size(self, repo_name):
         if request.is_xhr:
             return c.db_repo._repo_size()
@@ -181,8 +179,8 @@
         c.ts_min = ts_min_m
         c.ts_max = ts_max_y
 
-        stats = Statistics.query() \
-            .filter(Statistics.repository == c.db_repo) \
+        stats = db.Statistics.query() \
+            .filter(db.Statistics.repository == c.db_repo) \
             .scalar()
         c.stats_percentage = 0
         if stats and stats.languages:
@@ -210,5 +208,5 @@
             c.trending_languages = []
 
         recurse_limit = 500  # don't recurse more than 500 times when parsing
-        get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
-        return render('summary/statistics.html')
+        async_tasks.get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
+        return base.render('summary/statistics.html')
--- a/kallithea/front-end/kallithea-diff.less	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/front-end/kallithea-diff.less	Thu May 27 21:27:37 2021 +0200
@@ -62,6 +62,7 @@
   border-collapse: collapse;
   border-radius: 0px !important;
   width: 100%;
+  table-layout: fixed;
 
   /* line coloring */
   .context {
@@ -105,31 +106,26 @@
     border-color: rgba(0, 0, 0, 0.3);
   }
 
-  /* line numbers */
-  .lineno {
-    padding-left: 2px;
-    padding-right: 2px !important;
-    width: 30px;
+  /* line number columns */
+  td.lineno {
+    width: 4em;
     border-right: 1px solid @panel-default-border !important;
     vertical-align: middle !important;
-    text-align: center;
-  }
-  .lineno.new {
-    text-align: right;
-  }
-  .lineno.old {
-    text-align: right;
-  }
-  .lineno a {
-    color: #aaa !important;
     font-size: 11px;
     font-family: @font-family-monospace;
     line-height: normal;
-    padding-left: 6px;
-    padding-right: 6px;
-    display: block;
+    text-align: center;
+  }
+  td.lineno[colspan="2"] {
+    width: 8em;
   }
-  .line:hover .lineno a {
+  td.lineno a {
+    color: #aaa !important;
+    display: inline-block;
+    min-width: 2em;
+    text-align: right;
+  }
+  tr.line:hover td.lineno a {
     color: #333 !important;
   }
   /** CODE **/
@@ -172,27 +168,24 @@
   left: -8px;
   box-sizing: border-box;
 }
-/* comment bubble, only visible when in a commentable diff */
-.commentable-diff tr.line.add:hover td .add-bubble,
-.commentable-diff tr.line.del:hover td .add-bubble,
-.commentable-diff tr.line.unmod:hover td .add-bubble {
+.commentable-diff tr.line:hover td .add-bubble {
   display: block;
   z-index: 1;
 }
 .add-bubble div {
   background: @kallithea-theme-main-color;
-  width: 16px;
-  height: 16px;
-  line-height: 14px;
+  width: 1.2em;
+  height: 1.2em;
+  line-height: 1em;
   cursor: pointer;
-  padding: 0 2px 2px 0.5px;
+  padding: 0.1em 0.1em 0.1em 0.12em;
   border: 1px solid @kallithea-theme-main-color;
-  border-radius: 3px;
+  border-radius: 0.2em;
   box-sizing: border-box;
   overflow: hidden;
 }
 .add-bubble div:before {
-  font-size: 14px;
+  font-size: 1em;
   color: #ffffff;
   font-family: "kallithea";
   content: '\1f5ea';
--- a/kallithea/front-end/style.less	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/front-end/style.less	Thu May 27 21:27:37 2021 +0200
@@ -564,10 +564,6 @@
     background-position: 20px 0;
   }
 }
-.comment-preview.failed .user,
-.comment-preview.failed .panel-body {
-  color: #666;
-}
 .comment-preview .comment-submission-status {
   float: right;
 }
--- a/kallithea/i18n/be/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/be/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -11,24 +11,21 @@
 "Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
 "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
 
+msgid "Repository not found in the filesystem"
+msgstr "Рэпазітар не знойдзены на файлавай сістэме"
+
 msgid "There are no changesets yet"
 msgstr "Яшчэ не было змен"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Набор змен для %s %s не знойдзены ў %s"
+
 msgid "None"
 msgstr "Нічога"
 
 msgid "(closed)"
 msgstr "(зачынена)"
 
-msgid "Show whitespace"
-msgstr "Паказваць прабелы"
-
-msgid "Ignore whitespace"
-msgstr "Ігнараваць прабелы"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Павялічыць кантэкст да %(num)s радкоў"
-
 msgid "Successfully deleted pull request %s"
 msgstr "Pull-запыт %s паспяхова выдалены"
 
@@ -462,13 +459,6 @@
 msgid "Updated VCS settings"
 msgstr "Абноўлены налады VCS"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Немагчыма ўключыць падтрымку hgsubversion. Бібліятэка hgsubversion "
-"адсутнічае"
-
 msgid "Error occurred while updating application settings"
 msgstr "Памылка пры абнаўленні наладаў праграмы"
 
@@ -566,12 +556,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Старонка даступная толькі аўтарызаваным карыстальнікам"
 
-msgid "Repository not found in the filesystem"
-msgstr "Рэпазітар не знойдзены на файлавай сістэме"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Набор змен для %s %s не знойдзены ў %s"
-
 msgid "Binary file"
 msgstr "Двайковы файл"
 
@@ -584,6 +568,9 @@
 msgid "No changes detected"
 msgstr "Змен не выяўлена"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Павялічыць кантэкст да %(num)s радкоў"
+
 msgid "Deleted branch: %s"
 msgstr "Выдаленая галіна: %s"
 
@@ -695,15 +682,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"Рэпазітар %s адсутнічае ў базе дадзеных; магчыма, ён быў створаны ці "
-"пераназваны з файлавай сістэмы. Калі ласка, перазапусціце прыкладанне для "
-"сканавання рэпазітароў"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d год"
@@ -755,24 +733,12 @@
 msgid "just now"
 msgstr "цяпер"
 
-msgid "on line %s"
-msgstr "на радку %s"
-
-msgid "[Mention]"
-msgstr "[Згадванне]"
-
 msgid "top level"
 msgstr "верхні ўзровень"
 
 msgid "Kallithea Administrator"
 msgstr "Адміністратар Kallithea"
 
-msgid "Only admins can create repository groups"
-msgstr "Толькі адміністратары могуць ствараць групы репазітароў"
-
-msgid "Non-admins can create repository groups"
-msgstr "Неадміністратары могуць ствараць групы репазітароў"
-
 msgid "Only admins can create user groups"
 msgstr "Толькі адміністратары могуць ствараць групы карыстальнікаў"
 
@@ -827,17 +793,9 @@
 msgid "Closing"
 msgstr "Зачынены"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"%(user)s просіць вас разгледзець pull request %(pr_nice_id)s: %(pr_title)s"
-
 msgid "latest tip"
 msgstr "апошняя версія"
 
-msgid "New user registration"
-msgstr "Рэгістрацыя новага карыстальніка"
-
 msgid ""
 "You can't remove this user since it is crucial for the entire application"
 msgstr ""
@@ -944,13 +902,6 @@
 msgid "Invalid repository URL"
 msgstr "Няслушны URL рэпазітара"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
-msgstr ""
-"Няслушны URL рэпазітара. Ён мусіць быць карэктным URL http, https, ssh, "
-"svn+http ці svn+https"
-
 msgid "Fork has to be the same type as parent"
 msgstr "Тып форка будзе супадаць з бацькоўскім"
 
@@ -1042,10 +993,10 @@
 msgid "Password"
 msgstr "Пароль"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Забыліся на пароль?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "Няма акаўнта?"
 
 msgid "Sign In"
@@ -1627,9 +1578,6 @@
 msgid "Save Settings"
 msgstr "Захаваць налады"
 
-msgid "Custom Hooks"
-msgstr "Карыстальніцкія хукі"
-
 msgid "Failed to remove hook"
 msgstr "Не атрымалася выдаліць хук"
 
@@ -1675,9 +1623,6 @@
 msgid "Enable largefiles extension"
 msgstr "Уключыць падтрымку вялікіх файлаў"
 
-msgid "Enable hgsubversion extension"
-msgstr "Уключыць падтрымку hgsubversion"
-
 msgid "Location of repositories"
 msgstr "Месцазнаходжанне рэпазітароў"
 
@@ -1967,8 +1912,8 @@
 msgid "Failed to revoke permission"
 msgstr "Не атрымалася адклікаць прывілеі"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
-msgstr "Пацвердзіце выдаленне прывілею для {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
+msgstr "Пацвердзіце выдаленне прывілею для {0}: {1}?"
 
 msgid "Select changeset"
 msgstr "Выбраць набор змен"
@@ -2214,6 +2159,9 @@
 msgid "File diff"
 msgstr "Параўнанне файлаў"
 
+msgid "Ignore whitespace"
+msgstr "Ігнараваць прабелы"
+
 msgid "%s File Diff"
 msgstr "Параўнанне файла %s"
 
--- a/kallithea/i18n/da/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/da/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -10,24 +10,25 @@
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=2; plural=n != 1;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr "CSRF-token lækage opdaget, alle form-tokens er invalideret"
+
+msgid "Repository not found in the filesystem"
+msgstr "Repository ikke fundet i filsystemet"
+
 msgid "There are no changesets yet"
 msgstr "Der er ingen changesets endnu"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Changeset for %s %s ikke fundet i %s"
+
 msgid "None"
 msgstr "Ingen"
 
 msgid "(closed)"
 msgstr "(lukket)"
 
-msgid "Show whitespace"
-msgstr "Vis mellemrum"
-
-msgid "Ignore whitespace"
-msgstr "Ignorer mellemrum"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Øg diff konteksten med %(num)s linjer"
-
 msgid "Successfully deleted pull request %s"
 msgstr "Pull-forespørgsel %s slettet successfuldt"
 
@@ -495,13 +496,6 @@
 msgid "Updated VCS settings"
 msgstr "Opdateret VCS-indstillinger"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Ude af stand til at aktivere hgsubversion understøttelse. \"hgsubversion"
-"\" biblioteket mangler"
-
 msgid "Error occurred while updating application settings"
 msgstr "Der opstod en fejl ved opdatering af applikationsindstillinger"
 
@@ -598,16 +592,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Du skal være logget ind for at se denne side"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr "CSRF-token lækage opdaget, alle form-tokens er invalideret"
-
-msgid "Repository not found in the filesystem"
-msgstr "Repository ikke fundet i filsystemet"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Changeset for %s %s ikke fundet i %s"
-
 msgid "Binary file"
 msgstr "Binær fil"
 
@@ -620,6 +604,9 @@
 msgid "No changes detected"
 msgstr "Ingen ændringer fundet"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Øg diff konteksten med %(num)s linjer"
+
 msgid "Deleted branch: %s"
 msgstr "Slettet branch: %s"
 
@@ -731,14 +718,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"%s repository er ikke knyttet til db, måske var det skabt eller omdøbt "
-"fra filsystemet, kør applikationen igen for at scanne repositories"
-
 msgid "in %s"
 msgstr "i %s"
 
@@ -754,12 +733,6 @@
 msgid "just now"
 msgstr "lige nu"
 
-msgid "on line %s"
-msgstr "på linje %s"
-
-msgid "[Mention]"
-msgstr "[Omtale]"
-
 msgid "top level"
 msgstr "top-niveau"
 
@@ -802,12 +775,6 @@
 msgid "Default user has admin access to new user groups"
 msgstr "Standard-bruger har admin-adgang til nye brugergrupper"
 
-msgid "Only admins can create repository groups"
-msgstr "Kun administratorer kan oprette repository-grupper"
-
-msgid "Non-admins can create repository groups"
-msgstr "Ikke-administratorer kan oprette repository-grupper"
-
 msgid "Only admins can create user groups"
 msgstr "Kun administratorer kan oprette brugergrupper"
 
@@ -820,17 +787,6 @@
 msgid "Non-admins can create top level repositories"
 msgstr "Ikke-administratorer kan oprette top-niveau repositories"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"Repository oprettelse aktiveret med skriveadgang til en repository-gruppe"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"Repository oprettelse deaktiveret med skriveadgang til en repository-"
-"gruppe"
-
 msgid "Only admins can fork repositories"
 msgstr "Kun admins kan fork repositories"
 
@@ -873,13 +829,6 @@
 msgid "Name must not contain only digits"
 msgstr "Navn må ikke kun indeholde cifre"
 
-msgid ""
-"[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
-"%(branch)s"
-msgstr ""
-"[Kommentar] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" på "
-"%(branch)s"
-
 msgid "New user %(new_username)s registered"
 msgstr "Ny bruger %(new_username)s registreret"
 
@@ -900,11 +849,8 @@
 msgid "Closing"
 msgstr "Lukning"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"%(user)s vil have dig til at gennemgå pull-forespørgsel %(pr_nice_id)s: "
-"%(pr_title)s"
-
 msgid "Cannot create empty pull request"
 msgstr "Kan ikke oprette en tom pull-forespørgsel"
+
+msgid "Ignore whitespace"
+msgstr "Ignorer mellemrum"
--- a/kallithea/i18n/de/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/de/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -10,6 +10,14 @@
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=2; plural=n != 1;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr ""
+"Es wurde ein CSRF Leck entdeckt. Alle Formular Token sind abgelaufen"
+
+msgid "Repository not found in the filesystem"
+msgstr "Das Repository konnte nicht im Filesystem gefunden werden"
+
 msgid "There are no changesets yet"
 msgstr "Es gibt noch keine Änderungssätze"
 
@@ -19,15 +27,6 @@
 msgid "(closed)"
 msgstr "(geschlossen)"
 
-msgid "Show whitespace"
-msgstr "Zeige unsichtbare Zeichen"
-
-msgid "Ignore whitespace"
-msgstr "Ignoriere unsichtbare Zeichen"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Erhöhe diff-Kontext auf %(num)s Zeilen"
-
 msgid "Successfully deleted pull request %s"
 msgstr "Pull-Request %s erfolgreich gelöscht"
 
@@ -486,13 +485,6 @@
 msgid "Updated VCS settings"
 msgstr "VCS-Einstellungen aktualisiert"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"hgsubversion-Unterstützung konnte nicht aktiviert werden. Die "
-"\"hgsubversion\"-Bibliothek fehlt"
-
 msgid "Error occurred while updating application settings"
 msgstr ""
 "Ein Fehler ist während der Aktualisierung der Applikationseinstellungen "
@@ -520,11 +512,6 @@
 msgid "Send email task created"
 msgstr "Task zum Versenden von E-Mails erstellt"
 
-msgid "Builtin hooks are read-only. Please use another hook name."
-msgstr ""
-"Die eingebauten Hooks sind schreibgeschützt. Bitte verwenden Sie einen "
-"anderen Hook-Namen."
-
 msgid "Added new hook"
 msgstr "Neuer Hook hinzugefügt"
 
@@ -604,14 +591,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Sie müssen sich anmelden um diese Seite aufzurufen"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr ""
-"Es wurde ein CSRF Leck entdeckt. Alle Formular Token sind abgelaufen"
-
-msgid "Repository not found in the filesystem"
-msgstr "Das Repository konnte nicht im Filesystem gefunden werden"
-
 msgid "Binary file"
 msgstr "Binäre Datei"
 
@@ -624,6 +603,9 @@
 msgid "No changes detected"
 msgstr "Keine Änderungen erkannt"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Erhöhe diff-Kontext auf %(num)s Zeilen"
+
 msgid "Deleted branch: %s"
 msgstr "Branch %s gelöscht"
 
@@ -732,15 +714,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"Das %s Repository ist nicht in der Datenbank vorhanden, eventuell wurde "
-"es im Dateisystem erstellt oder umbenannt. Bitte starten sie die "
-"Applikation erneut um die Repositories neu zu Indizieren"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d Jahr"
@@ -786,12 +759,6 @@
 msgid "just now"
 msgstr "jetzt gerade"
 
-msgid "on line %s"
-msgstr "in Zeile %s"
-
-msgid "[Mention]"
-msgstr "[Mention]"
-
 msgid "top level"
 msgstr "höchste Ebene"
 
@@ -835,12 +802,6 @@
 msgid "Default user has admin access to new user groups"
 msgstr "Der Standard-Benutzer hat Admin-Rechte auf neuen Benutzer-Gruppen"
 
-msgid "Only admins can create repository groups"
-msgstr "Nur Admins können Repository-Gruppen erstellen"
-
-msgid "Non-admins can create repository groups"
-msgstr "Nicht-Admins können Repository-Gruppen erstellen"
-
 msgid "Only admins can create user groups"
 msgstr "Nur Admins können Benutzer-Gruppen erstellen"
 
@@ -853,18 +814,6 @@
 msgid "Non-admins can create top level repositories"
 msgstr "Nicht-Admins können Repositories oberster Ebene erstellen"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"Erstellung von Repositories mit Schreibzugriff für Repositorygruppe "
-"aktiviert"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"Erstellung von Repositories mit Schreibzugriff für Repositorygruppe "
-"deaktiviert"
-
 msgid "Only admins can fork repositories"
 msgstr "Nur Admins können Repositories forken"
 
@@ -926,9 +875,6 @@
 msgid "latest tip"
 msgstr "Letzter Tip"
 
-msgid "New user registration"
-msgstr "Neue Benutzerregistrierung"
-
 msgid ""
 "User \"%s\" still owns %s repositories and cannot be removed. Switch "
 "owners or remove those repositories: %s"
@@ -1021,13 +967,6 @@
 msgid "Invalid repository URL"
 msgstr "Ungültige Repository-URL"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
-msgstr ""
-"Ungültige Repository-URL. Es muss eine gültige http, https, ssh, svn+http "
-"oder svn+https URL sein"
-
 msgid "Fork has to be the same type as parent"
 msgstr "Forke um den selben typ wie der Vorgesetze zu haben"
 
@@ -1129,10 +1068,10 @@
 msgid "Stay logged in after browser restart"
 msgstr "Nach dem Neustart des Browsers eingeloggt bleiben"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Passwort vergessen?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "Kein Account?"
 
 msgid "Sign In"
@@ -1566,25 +1505,6 @@
 "Aktiviere dies, damit Nicht-Administratoren Repositories auf der obersten "
 "Ebene erstellen können."
 
-msgid ""
-"Note: This will also give all users API access to create repositories "
-"everywhere. That might change in future versions."
-msgstr ""
-"Hinweis: dadurch erhalten auch alle Benutzer API-Zugriff, um überall "
-"Repositories zu erstellen. Das kann sich in zukünftigen Versionen ändern."
-
-msgid "Repository creation with group write access"
-msgstr "Repository-Erstellung mit Gruppen-Schreibzugriff"
-
-msgid ""
-"With this, write permission to a repository group allows creating "
-"repositories inside that group. Without this, group write permissions "
-"mean nothing."
-msgstr ""
-"Falls aktiv, gewährt dies das Recht zum Erzeugen von Repositories in "
-"einer Repository-Gruppe. Falls inaktiv, sind Gruppen-"
-"Schreibberechtigungen wirkungslos."
-
 msgid "User group creation"
 msgstr "Benutzergruppen Erstellung"
 
@@ -1988,12 +1908,6 @@
 msgid "Save Settings"
 msgstr "Einstellungen speichern"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "Eingebaute Mercurial Hooks (Read -Only)"
-
-msgid "Custom Hooks"
-msgstr "Benutzerdefinierte Hooks"
-
 msgid ""
 "Hooks can be used to trigger actions on certain events such as push / "
 "pull. They can trigger Python functions or external applications."
@@ -2027,27 +1941,6 @@
 msgid "Install Git hooks"
 msgstr "Git-Hooks installieren"
 
-msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
-msgstr ""
-"Überprüfen Sie, ob die Git-Hooks von Kallithea für jedes Repository "
-"installiert sind. Aktuelle Hooks werden auf die neueste Version "
-"aktualisiert."
-
-msgid "Overwrite existing Git hooks"
-msgstr "Bestehende Git-Hooks überschreiben"
-
-msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
-msgstr ""
-"Wenn Sie Git-Hooks installieren, überschreiben Sie alle vorhandenen "
-"Hooks, auch wenn sie nicht von Kallithea zu kommen scheinen. WARNUNG: "
-"Diese Operation zerstört alle benutzerdefinierten Git-Hooks, die Sie "
-"möglicherweise von Hand bereitgestellt haben!"
-
 msgid "Rescan Repositories"
 msgstr "Repositories erneut scannen"
 
@@ -2103,17 +1996,6 @@
 msgid "Enable largefiles extension"
 msgstr "Erweiterung largefiles aktivieren"
 
-msgid "Enable hgsubversion extension"
-msgstr "Erweiterung hgsubversion aktivieren"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"Erfordert die Installation der hgsubversion-Bibliothek. Ermöglicht das "
-"Klonen von entfernten Subversion-Repositories während der Konvertierung "
-"zu Mercurial."
-
 msgid "Location of repositories"
 msgstr "Ort der Repositories"
 
@@ -2351,7 +2233,7 @@
 msgid "Group"
 msgstr "Gruppe"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
 msgstr "Widerruf der Rechte für {0}: {1} bestätigen?"
 
 msgid "Select changeset"
@@ -2441,6 +2323,9 @@
 msgid "Hello %s"
 msgstr "Hallo %s"
 
+msgid "Ignore whitespace"
+msgstr "Ignoriere unsichtbare Zeichen"
+
 msgid "or"
 msgstr "oder"
 
--- a/kallithea/i18n/el/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/el/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -10,24 +10,30 @@
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=2; plural=n != 1;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr ""
+"Εντοπίστηκε διαρροή ενός διακριτικού CSRF - όλα τα διακριτικά της φόρμας "
+"έχουν λήξει"
+
+msgid "Repository not found in the filesystem"
+msgstr "Το αποθετήριο δε βρέθηκε στο σύστημα αρχείων"
+
 msgid "There are no changesets yet"
 msgstr "Δεν υπάρχουν σετ αλλαγών ακόμα"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Το σετ αλλαγών για %s %sδεν βρέθηκε στο %s"
+
+msgid "SSH access is disabled."
+msgstr "Η πρόσβαση μέσω SSH είναι απενεργοποιημένη."
+
 msgid "None"
 msgstr "Χωρίς"
 
 msgid "(closed)"
 msgstr "(κλειστό)"
 
-msgid "Show whitespace"
-msgstr "Εμφάνιση κενού"
-
-msgid "Ignore whitespace"
-msgstr "Αγνόηση κενού"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Αύξηση του diff πλαισίου σε %(num)s γραμμές"
-
 msgid "No permission to change status"
 msgstr "Χωρίς δικαιώματα αλλαγής της κατάστασης"
 
@@ -543,13 +549,6 @@
 msgid "Updated VCS settings"
 msgstr "Ενημερωμένες ρυθμίσεις VCS"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Δεν γίνεται να ενεργοποιηθεί υποστήριξη για το hgsubversion. Λείπει η "
-"βιβλιοθήκη \"hgsubversion\""
-
 msgid "Error occurred while updating application settings"
 msgstr "Παρουσιάστηκε σφάλμα κατά την ενημέρωση των ρυθμίσεων της εφαρμογής"
 
@@ -578,11 +577,6 @@
 msgid "Hook already exists"
 msgstr "Το άγκιστρο υπάρχει ήδη"
 
-msgid "Builtin hooks are read-only. Please use another hook name."
-msgstr ""
-"Τα ενσωματωμένα άγκιστρα είναι μόνο για ανάγνωση. Παρακαλώ δώστε άλλο "
-"όνομα στο άγκιστρο."
-
 msgid "Added new hook"
 msgstr "Προσθήκη νέου άγκιστρου"
 
@@ -659,21 +653,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Πρέπει να είστε συνδεμένος για να δείτε αυτήν τη σελίδα"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr ""
-"Εντοπίστηκε διαρροή ενός διακριτικού CSRF - όλα τα διακριτικά της φόρμας "
-"έχουν λήξει"
-
-msgid "Repository not found in the filesystem"
-msgstr "Το αποθετήριο δε βρέθηκε στο σύστημα αρχείων"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Το σετ αλλαγών για %s %sδεν βρέθηκε στο %s"
-
-msgid "SSH access is disabled."
-msgstr "Η πρόσβαση μέσω SSH είναι απενεργοποιημένη."
-
 msgid "Binary file"
 msgstr "Δυαδικό αρχείο"
 
@@ -686,6 +665,9 @@
 msgid "No changes detected"
 msgstr "Δεν εντοπίστηκαν αλλαγές"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Αύξηση του diff πλαισίου σε %(num)s γραμμές"
+
 msgid "Deleted branch: %s"
 msgstr "Διαγραφή κλάδου: %s"
 
@@ -797,40 +779,9 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"Το αποθετήριο δεδομένων %s δεν έχει αντιστοιχιστεί στη βάση δεδομένων. "
-"Ίσως δημιουργήθηκε ή μετονομάστηκε από το σύστημα αρχείων. Εκτελέστε ξανά "
-"την εφαρμογή για να σαρώσετε ξανά τα αποθετήρια δεδομένων"
-
 msgid "SSH key is missing"
 msgstr "Το κλειδί SSH λείπει"
 
-msgid ""
-"Incorrect SSH key - it must have both a key type and a base64 part, like "
-"'ssh-rsa ASRNeaZu4FA...xlJp='"
-msgstr ""
-"Λανθασμένο κλειδί SSH - πρέπει να έχει έναν τύπο κλειδιού καθώς και ένα "
-"τμήμα base64, όπως \"ssh-rsa ASRNeaZu4FA ... xlJp =\""
-
-msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
-msgstr "Εσφαλμένο κλειδί SSH - πρέπει να ξεκινά με 'ssh-(rsa|dss|ed25519)'"
-
-msgid "Incorrect SSH key - unexpected characters in base64 part %r"
-msgstr ""
-"Εσφαλμένο κλειδί SSH - μη αναμενόμενοι χαρακτήρες στο τμήμα base64 %r"
-
-msgid "Incorrect SSH key - failed to decode base64 part %r"
-msgstr ""
-"Εσφαλμένο κλειδί SSH - απέτυχε η αποκωδικοποίηση του τμήματος base64 %r"
-
-msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
-msgstr ""
-"Εσφαλμένο κλειδί SSH - το base64 μέρος δεν είναι %r όπως ζητήθηκε, αλλά %r"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d έτος"
@@ -876,12 +827,6 @@
 msgid "just now"
 msgstr "μόλις τώρα"
 
-msgid "on line %s"
-msgstr "στη γραμμή %s"
-
-msgid "[Mention]"
-msgstr "[Αναφορά]"
-
 msgid "top level"
 msgstr "ανώτερο επίπεδο"
 
@@ -934,12 +879,6 @@
 msgstr ""
 "Ο προεπιλεγμένος χρήστης έχει πρόσβαση διαχειριστή σε νέες ομάδες χρηστών"
 
-msgid "Only admins can create repository groups"
-msgstr "Μόνο οι διαχειριστές μπορούν να δημιουργήσουν ομάδες αποθετηρίων"
-
-msgid "Non-admins can create repository groups"
-msgstr "Οι μη διαχειριστές μπορούν να δημιουργήσουν ομάδες αποθετηρίων"
-
 msgid "Only admins can create user groups"
 msgstr "Μόνο οι διαχειριστές μπορούν να δημιουργήσουν ομάδες χρηστών"
 
@@ -954,18 +893,6 @@
 msgstr ""
 "Οι μη διαχειριστές μπορούν να δημιουργήσουν αποθετήρια ανώτατου επιπέδου"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"Η δημιουργία αποθετηρίου είναι ενεργοποιημένη με δικαιώματα εγγραφής σε "
-"μια ομάδα αποθετηρίων"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"Η δημιουργία αποθετηρίου απενεργοποιήθηκε με δικαιώματα εγγραφής σε μια "
-"ομάδα αποθετηρίων"
-
 msgid "Only admins can fork repositories"
 msgstr "Μόνο οι διαχειριστές μπορούν να κλωνοποιήσουν τα αποθετήρια"
 
@@ -1014,12 +941,6 @@
 msgid "Closing"
 msgstr "Κλείσιμο"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"Ο χρήστης %(user)s θέλει να αναθεωρήσετε την αίτηση έλξης %(pr_nice_id)s: "
-"%(pr_title)s"
-
 msgid "Cannot create empty pull request"
 msgstr "Δεν είναι δυνατή η δημιουργία κενής αίτησης έλξης"
 
@@ -1067,9 +988,6 @@
 msgid "SSH key with fingerprint %r found"
 msgstr "Βρέθηκε κλειδί SSH με δακτυλικό αποτύπωμα %r"
 
-msgid "New user registration"
-msgstr "Εγγραφή νέου χρήστη"
-
 msgid ""
 "You can't remove this user since it is crucial for the entire application"
 msgstr ""
@@ -1185,13 +1103,6 @@
 msgid "Invalid repository URL"
 msgstr "Μη έγκυρη διεύθυνση URL αποθετηρίου"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
-msgstr ""
-"Μη έγκυρη διεύθυνση URL του αποθετηρίου. Πρέπει να είναι μια έγκυρη http, "
-"https, ssh, svn+http ή svn+https διεύθυνση URL"
-
 msgid "Fork has to be the same type as parent"
 msgstr "Ο κλώνος πρέπει να έχει τον ίδιο τύπο με τον γονέα του"
 
@@ -1293,10 +1204,10 @@
 msgstr ""
 "Μείνετε συνδεδεμένοι μετά την επανεκκίνηση του προγράμματος περιήγησης"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Ξεχάσατε τον κωδικό σας;"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "Δεν έχετε λογαριασμό;"
 
 msgid "Sign In"
@@ -1788,26 +1699,6 @@
 "Ενεργοποιήστε αυτήν την επιλογή ώστε να επιτρέπεται σε μη διαχειριστές να "
 "δημιουργούν αποθετήρια στο ανώτερο επίπεδο."
 
-msgid ""
-"Note: This will also give all users API access to create repositories "
-"everywhere. That might change in future versions."
-msgstr ""
-"Σημείωση: Αυτό θα δώσει επίσης σε όλους τους χρήστες πρόσβαση API για τη "
-"δημιουργία αποθετηρίων παντού. Αυτό μπορεί να αλλάξει σε μελλοντικές "
-"εκδόσεις."
-
-msgid "Repository creation with group write access"
-msgstr "Δημιουργία αποθετηρίου με πρόσβαση εγγραφής ομάδας"
-
-msgid ""
-"With this, write permission to a repository group allows creating "
-"repositories inside that group. Without this, group write permissions "
-"mean nothing."
-msgstr ""
-"Με αυτό, η άδεια εγγραφής σε μια ομάδα αποθετηρίων επιτρέπει τη "
-"δημιουργία αποθετηρίων εντός αυτής της ομάδας. Χωρίς αυτό, τα δικαιώματα "
-"ομαδικής εγγραφής δεν σημαίνουν τίποτα."
-
 msgid "User group creation"
 msgstr "Δημιουργία ομάδας χρηστών"
 
@@ -2245,12 +2136,6 @@
 msgid "Save Settings"
 msgstr "Αποθήκευση Ρυθμίσεων"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "Ενσωματωμένοι Mercurial Hooks (μόνο για ανάγνωση)"
-
-msgid "Custom Hooks"
-msgstr "Προσαρμοσμένα άγκιστρα"
-
 msgid "Failed to remove hook"
 msgstr "Απέτυχε η αφαίρεση γάντζου"
 
@@ -2279,26 +2164,6 @@
 msgid "Install Git hooks"
 msgstr "Εγκατάσταση Git hooks"
 
-msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
-msgstr ""
-"Επαληθεύστε εάν τα Git hooks της Καλλιθέας είναι εγκατεστημένα για κάθε "
-"αποθετήριο. Τα τρέχοντα hooks θα ενημερωθούν στην τελευταία έκδοση."
-
-msgid "Overwrite existing Git hooks"
-msgstr "Αντικατάσταση υπαρχόντων Git hooks"
-
-msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
-msgstr ""
-"Εάν εγκαθιστάτε Git hooks, αντικαταστήστε τυχόν υπάρχοντα hooks, ακόμα κι "
-"αν δεν φαίνεται να προέρχονται από την Καλλιθέα. ΠΡΟΕΙΔΟΠΟΙΗΣΗ: Αυτή η "
-"λειτουργία θα καταστρέψει τυχόν προσαρμοσμένα git hooks που μπορεί να "
-"έχετε αναπτύξει με το χέρι!"
-
 msgid "Rescan Repositories"
 msgstr "Επανασάρωση αποθετηρίων"
 
@@ -2354,17 +2219,6 @@
 msgid "Enable largefiles extension"
 msgstr "Ενεργοποίηση επέκτασης μεγάλων αρχείων"
 
-msgid "Enable hgsubversion extension"
-msgstr "Ενεργοποίηση επέκτασης hgsubversion"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"Απαιτεί την εγκατάσταση της βιβλιοθήκης hgsubversion. Ενεργοποιεί την "
-"κλωνοποίηση απομακρυσμένων Subversion αποθετηρίων και τη μετατροπή τους "
-"σε Mercurial."
-
 msgid "Location of repositories"
 msgstr "Τοποθεσία αποθετηρίων"
 
@@ -2727,9 +2581,6 @@
 msgid "Forgot password?"
 msgstr "Ξεχάσατε τον κωδικό πρόσβασης;"
 
-msgid "Don't have an account?"
-msgstr "Δεν έχετε λογαριασμό;"
-
 msgid "Log Out"
 msgstr "Αποσύνδεση"
 
@@ -2840,7 +2691,7 @@
 msgid "Failed to revoke permission"
 msgstr "Απέτυχε η ανάκληση του δικαιωμάτος"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
 msgstr "Επιβεβαιώστε την ανάκληση του δικαιώματος για {0}: {1};"
 
 msgid "Select changeset"
@@ -3260,6 +3111,9 @@
 msgid "File diff"
 msgstr "Αρχείο διαφοράς"
 
+msgid "Ignore whitespace"
+msgstr "Αγνόηση κενού"
+
 msgid "%s File Diff"
 msgstr "%s Αρχείο διαφοράς"
 
--- a/kallithea/i18n/es/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/es/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,15 +19,6 @@
 msgid "(closed)"
 msgstr "(cerrado)"
 
-msgid "Show whitespace"
-msgstr "Mostrar espacios en blanco"
-
-msgid "Ignore whitespace"
-msgstr "Ignorar espacios en blanco"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Aumentar el contexto del diff a %(num)s lineas"
-
 msgid "Successfully deleted pull request %s"
 msgstr "Petición de pull %s eliminada correctamente"
 
@@ -280,5 +271,11 @@
 msgid "Successfully updated gist content"
 msgstr "Gist actualizado correctamente"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Aumentar el contexto del diff a %(num)s lineas"
+
 msgid "Select changeset"
 msgstr "Seleccionar cambios"
+
+msgid "Ignore whitespace"
+msgstr "Ignorar espacios en blanco"
--- a/kallithea/i18n/fr/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/fr/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -10,24 +10,30 @@
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=2; plural=n > 1;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr ""
+"Une fuite de jeton CSRF a été détectée - tous les jetons de formulaire "
+"sont considérés comme expirés"
+
+msgid "Repository not found in the filesystem"
+msgstr "Dépôt non trouvé sur le système de fichiers"
+
 msgid "There are no changesets yet"
 msgstr "Il n’y a aucun changement pour le moment"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Ensemble de changements pour %s %s non trouvé dans %s"
+
+msgid "SSH access is disabled."
+msgstr "L'accès SSH est désactivé."
+
 msgid "None"
 msgstr "Aucun"
 
 msgid "(closed)"
 msgstr "(fermé)"
 
-msgid "Show whitespace"
-msgstr "Afficher les espaces et tabulations"
-
-msgid "Ignore whitespace"
-msgstr "Ignorer les espaces et tabulations"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Augmenter le contexte du diff à %(num)s lignes"
-
 msgid "No permission to change status"
 msgstr "Permission manquante pour changer le statut"
 
@@ -549,13 +555,6 @@
 msgid "Updated VCS settings"
 msgstr "Réglages des gestionnaires de versions mis à jour"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Impossible d'activer la prise en charge de hgsubversion. La bibliothèque "
-"« hgsubversion » est manquante"
-
 msgid "Error occurred while updating application settings"
 msgstr ""
 "Une erreur est survenue durant la mise à jour des réglages de "
@@ -587,10 +586,12 @@
 msgid "Hook already exists"
 msgstr "Le hook existe déjà"
 
-msgid "Builtin hooks are read-only. Please use another hook name."
+msgid ""
+"Hook names with \".kallithea_\" are reserved for internal use. Please use "
+"another hook name."
 msgstr ""
-"Les hooks intégrés sont en lecture seule. Merci de choisir un autre nom "
-"pour le hook."
+"Les noms de hook avec \".kallithea_\" sont réservés pour un usage interne. "
+"Merci de choisir un autre nom pour le hook."
 
 msgid "Added new hook"
 msgstr "Le nouveau hook a été ajouté"
@@ -671,21 +672,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Vous devez être connecté pour visualiser cette page"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr ""
-"Une fuite de jeton CSRF a été détectée - tous les jetons de formulaire "
-"sont considérés comme expirés"
-
-msgid "Repository not found in the filesystem"
-msgstr "Dépôt non trouvé sur le système de fichiers"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Ensemble de changements pour %s %s non trouvé dans %s"
-
-msgid "SSH access is disabled."
-msgstr "L'accès SSH est désactivé."
-
 msgid "Binary file"
 msgstr "Fichier binaire"
 
@@ -698,6 +684,15 @@
 msgid "No changes detected"
 msgstr "Aucun changement détecté"
 
+msgid "Show whitespace changes"
+msgstr "Afficher les modifications d'espaces et de tabulations"
+
+msgid "Ignore whitespace changes"
+msgstr "Ignorer les modifications d'espaces et de tabulations"
+
+msgid "Increase diff context to %(num)s lines"
+msgstr "Augmenter le contexte du diff à %(num)s lignes"
+
 msgid "Deleted branch: %s"
 msgstr "Branche supprimée : %s"
 
@@ -809,40 +804,55 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"Le dépôt %s n’est pas représenté dans la base de données. Il a "
-"probablement été créé ou renommé manuellement. Veuillez relancer "
-"l’application pour rescanner les dépôts"
-
 msgid "SSH key is missing"
 msgstr "La clé SSH est manquante"
 
 msgid ""
-"Incorrect SSH key - it must have both a key type and a base64 part, like "
+"Invalid SSH key - it must have both a key type and a base64 part, like "
 "'ssh-rsa ASRNeaZu4FA...xlJp='"
 msgstr ""
-"Clé SSH incorrecte – elle doit comporter à la fois un type de clé et une "
+"Clé SSH invalide – elle doit comporter à la fois un type de clé et une "
 "partie base64, comme 'ssh-rsa ASRNeaZu4FA...xlJp='"
 
-msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
+msgid ""
+"Invalid SSH key - it must start with key type 'ssh-rsa', 'ssh-dss', 'ssh-"
+"ed448', or 'ssh-ed25519'"
 msgstr ""
-"Clé SSH incorrecte – elle doit commencer par « ssh-(rsa|dss|ed25519) »"
-
-msgid "Incorrect SSH key - unexpected characters in base64 part %r"
+"Clé SSH invalide – elle doit commencer par le type de clé 'ssh-rsa', 'ssh-"
+"dss', 'ssh-ed448', ou 'ssh-ed25519'"
+
+msgid "Invalid SSH key - unexpected characters in base64 part %r"
+msgstr "Clé SSH invalide – caractères inattendus dans la partie base 64 %r"
+
+msgid ""
+"Invalid SSH key - base64 part %r seems truncated (it can't be decoded)"
 msgstr ""
-"Clé SSH incorrecte – caractères inattendus dans la partie base 64 %r"
-
-msgid "Incorrect SSH key - failed to decode base64 part %r"
-msgstr "Clé SSH incorrecte – échec du décodage de la partie base64 %r"
-
-msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
+"Clé SSH invalide – la partie base64 %r semble tronquée (elle ne peut pas "
+"être décodée)"
+
+msgid ""
+"Invalid SSH key - base64 part %r seems truncated (it contains a partial "
+"string length)"
 msgstr ""
-"Clé SSH incorrecte – la partie base 64 n'est pas %r comme il est dit mais "
-"%r"
+"Clé SSH invalide – la partie base64 %r semble tronquée (elle contient une "
+"taille partielle)"
+
+msgid ""
+"Invalid SSH key - base64 part %r seems truncated (it is too short for "
+"declared string length %s)"
+msgstr ""
+"Clé SSH invalide – la partie base64 %r semble tronquée (elle est trop court "
+"pour la taille déclarée %s)"
+
+msgid ""
+"Invalid SSH key - base64 part %r seems truncated (it contains too few "
+"strings for a %s key)"
+msgstr ""
+"Clé SSH invalide – la partie base64 %r semble tronquée (elle ne contient pas "
+"assez de parties pour une clé %s)"
+
+msgid "Invalid SSH key - it is a %s key but the base64 part contains %r"
+msgstr "Clé SSH invalide – c'est une clé %s mais la partie base64 contient %r"
 
 msgid "%d year"
 msgid_plural "%d years"
@@ -889,12 +899,6 @@
 msgid "just now"
 msgstr "à l’instant"
 
-msgid "on line %s"
-msgstr "à la ligne %s"
-
-msgid "[Mention]"
-msgstr "[Mention]"
-
 msgid "top level"
 msgstr "niveau supérieur"
 
@@ -952,13 +956,6 @@
 "L'utilisateur par défaut a un accès administrateur aux nouveaux groupes "
 "d'utilisateurs"
 
-msgid "Only admins can create repository groups"
-msgstr "Seul un administrateur peut créer un groupe de dépôts"
-
-msgid "Non-admins can create repository groups"
-msgstr ""
-"Les utilisateurs non-administrateurs peuvent créer des groupes de dépôts"
-
 msgid "Only admins can create user groups"
 msgstr "Seul un administrateur peut créer des groupes d'utilisateurs"
 
@@ -975,18 +972,6 @@
 "Les utilisateurs non-administrateurs peuvent créer des dépôts de niveau "
 "supérieur"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"Création de dépôts activée avec l'accès en écriture vers un groupe de "
-"dépôts"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"Création de dépôts désactivée avec l'accès en écriture vers un groupe de "
-"dépôts"
-
 msgid "Only admins can fork repositories"
 msgstr "Seul un administrateur peut faire un fork de dépôt"
 
@@ -1032,10 +1017,10 @@
 
 msgid ""
 "[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
-"%(branch)s"
+"%(branch)s by %(cs_author_username)s"
 msgstr ""
-"[Commentaire] Changeset %(short_id)s « %(message_short)s » de "
-"%(repo_name)s dans %(branch)s"
+"[Commentaire] Changeset %(short_id)s « %(message_short)s » de %(repo_name)s "
+"dans %(branch)s par %(cs_author_username)s"
 
 msgid "New user %(new_username)s registered"
 msgstr "Nouvel utilisateur %(new_username)s enregistré"
@@ -1057,12 +1042,6 @@
 msgid "Closing"
 msgstr "Fermeture"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"%(user)s veut que vous regardiez la demande de pull %(pr_nice_id)s : "
-"%(pr_title)s"
-
 msgid "Cannot create empty pull request"
 msgstr "Impossible de créer une requête de pull vide"
 
@@ -1110,9 +1089,6 @@
 msgid "SSH key with fingerprint %r found"
 msgstr "Clé SSH avec l'empreinte %r trouvée"
 
-msgid "New user registration"
-msgstr "Nouveau enregistrement d'utilisateur"
-
 msgid ""
 "You can't remove this user since it is crucial for the entire application"
 msgstr ""
@@ -1227,12 +1203,9 @@
 msgid "Invalid repository URL"
 msgstr "URL de dépôt invalide"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
+msgid "Invalid repository URL. It must be a valid http, https, or ssh URL"
 msgstr ""
-"URL de dépôt invalide. Ce doit être une URL valide de type http, https, "
-"ssh, svn+http ou svn+https"
+"URL de dépôt invalide. Ce doit être une URL valide de type http, https ou ssh"
 
 msgid "Fork has to be the same type as parent"
 msgstr "Le fork doit être du même type que le parent"
@@ -1334,10 +1307,10 @@
 msgid "Stay logged in after browser restart"
 msgstr "Rester connecté après un redémarrage du navigateur"
 
-msgid "Forgot your password ?"
-msgstr "Mot de passe oublié ?"
-
-msgid "Don't have an account ?"
+msgid "Forgot your password?"
+msgstr "Mot de passe oublié?"
+
+msgid "Don't have an account?"
 msgstr "Vous n’avez pas de compte ?"
 
 msgid "Sign In"
@@ -1826,26 +1799,6 @@
 "Activer pour autoriser les non-administrateurs à créer des dépôts au "
 "niveau supérieur."
 
-msgid ""
-"Note: This will also give all users API access to create repositories "
-"everywhere. That might change in future versions."
-msgstr ""
-"Note : Cela autorisera également tous les utilisateurs à utiliser l'API "
-"pour créer des dépôts partout. Ce comportement peut changer dans des "
-"versions futures."
-
-msgid "Repository creation with group write access"
-msgstr "Création de dépôts avec l'accès en écriture du groupe"
-
-msgid ""
-"With this, write permission to a repository group allows creating "
-"repositories inside that group. Without this, group write permissions "
-"mean nothing."
-msgstr ""
-"Avec ceci, le droit d'écriture dans un groupe de dépôt donne le droit de "
-"créer des dépôts dans ce groupe. Sans ceci, le droit d'écriture pour les "
-"groupes n'a pas d'impact."
-
 msgid "User group creation"
 msgstr "Création de groupes d'utilisateurs"
 
@@ -2276,11 +2229,8 @@
 msgid "Save Settings"
 msgstr "Enregistrer les options"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "Hooks Mercurial intégrés (lecture seule)"
-
-msgid "Custom Hooks"
-msgstr "Hooks personnalisés"
+msgid "Custom Global Mercurial Hooks"
+msgstr "Hooks Mercurial globaux personnalisés"
 
 msgid ""
 "Hooks can be used to trigger actions on certain events such as push / "
@@ -2290,6 +2240,21 @@
 "certains évènements comme le push et le pull. Ils peuvent déclencher des "
 "fonctions Python ou des applications externes."
 
+msgid "Git Hooks"
+msgstr "Git Hooks"
+
+msgid ""
+"Kallithea has no support for custom Git hooks. Kallithea will use Git "
+"post-receive hooks internally. Installation of these hooks is managed in "
+"%s."
+msgstr ""
+"Kallithea ne supporte pas les hooks Git personnalisés. Kallithea utilise des "
+"hooks Git de post-réception en interne. L'installation de ces hooks est "
+"gérée dans %s."
+
+msgid "Custom Hooks are not enabled"
+msgstr "Les Hooks personnalisés ne sont pas activés"
+
 msgid "Failed to remove hook"
 msgstr "Erreur lors de la suppression du hook"
 
@@ -2318,23 +2283,25 @@
 msgstr "Installer des hooks Git"
 
 msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
+"Install Kallithea's internal hooks for all Git repositories where they "
+"are missing or can be upgraded. Existing hooks that don't seem to come "
+"from Kallithea will not be touched."
 msgstr ""
-"Vérifier si les hooks Git de Kallithea sont installés pour chaque dépôt. "
-"Les hooks actuels seront mis à jour vers la dernière version."
-
-msgid "Overwrite existing Git hooks"
-msgstr "Écraser les hooks Git existants"
+"Installe les hooks internes de Kallithea pour tous les dépôts Git où ils "
+"sont absents ou s'ils peuvent être mis à jour. Les hooks existants qui ne "
+"semblent pas être livrés avec Kallithea ne seront pas impactés."
+
+msgid "Install and overwrite Git hooks"
+msgstr "Installer et surcharger des hooks Git"
 
 msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
+"Install Kallithea's internal hooks for all Git repositories. Existing "
+"hooks that don't seem to come from Kallithea will be disabled by renaming "
+"to .bak extension."
 msgstr ""
-"Lors de l'installation des hooks Git, écraser tous les hooks existants, "
-"même s'ils ne semblent pas provenir de Kallithea. ATTENTION : cette "
-"opération détruira tous les hooks Git que vous avez déployés à la main !"
+"Installe les hooks internes de Kallithea pour tous les dépôts Git. Les hooks "
+"existants qui ne semblent pas être livrés avec Kallithea seront désactivés "
+"en les renommant avec l'extension .bak."
 
 msgid "Rescan Repositories"
 msgstr "Relancer le scan des dépôts"
@@ -2379,6 +2346,9 @@
 msgid "Python Packages"
 msgstr "Paquets Python"
 
+msgid "Mercurial Push Hooks"
+msgstr "Hooks Push Mercurial"
+
 msgid "Show repository size after push"
 msgstr "Afficher la taille du dépôt après un push"
 
@@ -2391,16 +2361,6 @@
 msgid "Enable largefiles extension"
 msgstr "Activer l'extension largefiles"
 
-msgid "Enable hgsubversion extension"
-msgstr "Activer l'extension hgsubversion"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"La bibliothèque hgsubversion doit être installée. Elle permet de cloner "
-"des dépôts SVN distants et de les migrer vers Mercurial."
-
 msgid "Location of repositories"
 msgstr "Emplacement des dépôts"
 
@@ -2472,6 +2432,47 @@
 "emplacement réseau/hôte du serveur Kallithea en cours d'utilisation."
 
 msgid ""
+"Schema of clone URL construction eg. '{scheme}://{user}@{netloc}/"
+"{repo}'.\n"
+"                                                    The following "
+"variables are available:\n"
+"                                                    {scheme} 'http' or "
+"'https' sent from running Kallithea server,\n"
+"                                                    {user}   current user "
+"username,\n"
+"                                                    {netloc} network "
+"location/server host of running Kallithea server,\n"
+"                                                    {repo}   full "
+"repository name,\n"
+"                                                    {repoid} ID of "
+"repository, can be used to construct clone-by-id,\n"
+"                                                    {system_user}  name "
+"of the Kallithea system user,\n"
+"                                                    {hostname}  server "
+"hostname\n"
+"                                                    "
+msgstr ""
+"Modèle de construction d'URL de clone. Par exemple : "
+"'{scheme}://{user}@{netloc}/{repo}'.\n"
+"                                                       Les variables "
+"suivantes sont disponibles :\n"
+"                                                        {scheme}    'http' "
+"ou 'https' envoyé à partir du serveur Kallithea en cours d'utilisation,\n"
+"                                                        {user}     nom de "
+"l'utilisateur courant,\n"
+"                                                        {netloc}    "
+"emplacement réseau/hôte du serveur Kallithea en cours d'utilisation,\n"
+"                                                        {repo}    nom "
+"complet du dépôt,\n"
+"                                                        {repoid}    ID du "
+"dépôt, peut être utilisé pour cloner par ID,\n"
+"                                                        {system_user}  nom "
+"de l'utilisateur système Kallithea,\n"
+"                                                        {hostname}  nom "
+"d'hôte du serveur\n"
+"                                                    "
+
+msgid ""
 "Schema for constructing SSH clone URL, eg. 'ssh://{system_user}"
 "@{hostname}/{repo}'."
 msgstr ""
@@ -2715,9 +2716,6 @@
 msgid "Forgot password?"
 msgstr "Mot de passe oublié ?"
 
-msgid "Don't have an account?"
-msgstr "Vous n’avez pas de compte ?"
-
 msgid "Log Out"
 msgstr "Se déconnecter"
 
@@ -2827,7 +2825,7 @@
 msgid "Failed to revoke permission"
 msgstr "Échec de la révocation de permission"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
 msgstr "Voulez-vous vraiment révoquer la permission pour {0} : {1} ?"
 
 msgid "Select changeset"
@@ -2908,6 +2906,9 @@
 msgid "Changeset status: %s by %s"
 msgstr "Statut de changeset : %s par %s"
 
+msgid "(No commit message)"
+msgstr "(Pas de message de commit)"
+
 msgid "Expand commit message"
 msgstr "Développer le message de commit"
 
@@ -3070,6 +3071,12 @@
 msgid "Show full side-by-side diff for this file"
 msgstr "Afficher le diff complet côte-à-côte pour ce fichier"
 
+msgid "Raw diff for this file"
+msgstr "Diff brut pour ce fichier"
+
+msgid "Download diff for this file"
+msgstr "Télécharger le diff pour ce fichier"
+
 msgid "Show inline comments"
 msgstr "Afficher les commentaires de ligne"
 
@@ -3243,6 +3250,9 @@
 msgid "File diff"
 msgstr "Diff de fichier"
 
+msgid "Ignore whitespace"
+msgstr "Ignorer les espaces et tabulations"
+
 msgid "%s File Diff"
 msgstr "Diff de fichier pour %s"
 
--- a/kallithea/i18n/ja/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/ja/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -10,6 +10,9 @@
 "Content-Transfer-Encoding: 8bit\n"
 "Plural-Forms: nplurals=1; plural=0;\n"
 
+msgid "Repository not found in the filesystem"
+msgstr "ファイルシステム内にリポジトリが見つかりません"
+
 msgid "There are no changesets yet"
 msgstr "まだチェンジセットがありません"
 
@@ -19,15 +22,6 @@
 msgid "(closed)"
 msgstr "(閉鎖済み)"
 
-msgid "Show whitespace"
-msgstr "空白を表示"
-
-msgid "Ignore whitespace"
-msgstr "空白を無視"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "diff コンテキストを %(num)s 行増やす"
-
 msgid "Such revision does not exist for this repository"
 msgstr "お探しのリビジョンはこのリポジトリにはありません"
 
@@ -435,13 +429,6 @@
 msgid "Updated VCS settings"
 msgstr "VCS設定を更新しました"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"\"hgsubversion\"ライブラリが見つからないため、hgsubversionサポートを有効に"
-"出来ません"
-
 msgid "Error occurred while updating application settings"
 msgstr "アプリケーション設定の更新中にエラーが発生しました"
 
@@ -539,9 +526,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "このページを閲覧するためにはサインインが必要です"
 
-msgid "Repository not found in the filesystem"
-msgstr "ファイルシステム内にリポジトリが見つかりません"
-
 msgid "Binary file"
 msgstr "バイナリファイル"
 
@@ -554,6 +538,9 @@
 msgid "No changes detected"
 msgstr "検出された変更はありません"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "diff コンテキストを %(num)s 行増やす"
+
 msgid "Deleted branch: %s"
 msgstr "削除されたブランチ: %s"
 
@@ -662,15 +649,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"%s リポジトリはDB内に見つかりませんでした。おそらくファイルシステム上で作"
-"られたか名前が変更されたためです。リポジトリをもう一度チェックするためにア"
-"プリケーションを再起動してください"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d 年"
@@ -710,12 +688,6 @@
 msgid "just now"
 msgstr "たったいま"
 
-msgid "on line %s"
-msgstr "%s 行目"
-
-msgid "[Mention]"
-msgstr "[Mention]"
-
 msgid "top level"
 msgstr "top level"
 
@@ -733,12 +705,6 @@
 msgstr ""
 "デフォルトユーザーは新しいリポジトリに書き込みアクセスする権限があります"
 
-msgid "Only admins can create repository groups"
-msgstr "管理者のみがリポジトリのグループを作成できます"
-
-msgid "Non-admins can create repository groups"
-msgstr "非管理者がリポジトリのグループを作成できます"
-
 msgid "Only admins can create user groups"
 msgstr "管理者だけがユーザー グループを作成することができます"
 
@@ -751,16 +717,6 @@
 msgid "Non-admins can create top level repositories"
 msgstr "非管理者がトップレベルにリポジトリを作成することができます"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"リポジトリグループの書き込みパーミッションを使ったリポジトリ作成が有効です"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"リポジトリグループの書き込みパーミッションを使ったリポジトリ作成は無効です"
-
 msgid "Only admins can fork repositories"
 msgstr "管理者のみがリポジトリをフォークすることができます"
 
@@ -803,18 +759,9 @@
 msgid "Closing"
 msgstr "クローズ"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"%(user)s がプリリクエスト #%(pr_nice_id)s: %(pr_title)s のレビューを求めて"
-"います"
-
 msgid "latest tip"
 msgstr "最新のtip"
 
-msgid "New user registration"
-msgstr "新規ユーザー登録"
-
 msgid ""
 "User \"%s\" still owns %s repositories and cannot be removed. Switch "
 "owners or remove those repositories: %s"
@@ -1007,10 +954,10 @@
 msgid "Password"
 msgstr "パスワード"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "パスワードを忘れた?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "アカウントを持っていない?"
 
 msgid "Sign In"
@@ -1357,9 +1304,6 @@
 msgid "Top level repository creation"
 msgstr "トップレベルリポジトリの作成"
 
-msgid "Repository creation with group write access"
-msgstr "グループ書き込み権限でのリポジトリ作成"
-
 msgid "User group creation"
 msgstr "ユーザーグループ作成"
 
@@ -1711,12 +1655,6 @@
 msgid "Save Settings"
 msgstr "設定を保存"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "組み込みのMercurialフック (編集不可)"
-
-msgid "Custom Hooks"
-msgstr "カスタムフック"
-
 msgid ""
 "Hooks can be used to trigger actions on certain events such as push / "
 "pull. They can trigger Python functions or external applications."
@@ -1737,25 +1675,6 @@
 msgid "Install Git hooks"
 msgstr "Gitフックをインストール"
 
-msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
-msgstr ""
-"各リポジトリに Kallitheas の Gitフックがインストールされているか確認してく"
-"ださい。現在のフックは最新版に更新されます"
-
-msgid "Overwrite existing Git hooks"
-msgstr "既存のGitフックを上書きする"
-
-msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
-msgstr ""
-"GitフックをインストールするとKallitheaから設定されたものであっても既存の"
-"フックは全て上書きされます。警告: この操作はあなたが手動で配置したGitのカ"
-"スタムフックを全て破壊します!"
-
 msgid "Rescan Repositories"
 msgstr "リポジトリを再スキャン"
 
@@ -1811,16 +1730,6 @@
 msgid "Enable largefiles extension"
 msgstr "largefilesエクステンションを有効にする"
 
-msgid "Enable hgsubversion extension"
-msgstr "hgsubversionエクステンションを有効にする"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"hgsubversion ライブラリのインストールが必要です。リモートのSVNリポジトリを"
-"クローンしてMercurialリポジトリに変換するすることが可能です。"
-
 msgid "Location of repositories"
 msgstr "リポジトリの場所"
 
@@ -2166,7 +2075,7 @@
 msgid "Failed to revoke permission"
 msgstr "権限の取消に失敗しました"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
 msgstr "権限 {0}: {1} を取り消してもよろしいですか?"
 
 msgid "Select changeset"
@@ -2408,6 +2317,9 @@
 msgid "File diff"
 msgstr "ファイル差分"
 
+msgid "Ignore whitespace"
+msgstr "空白を無視"
+
 msgid "%s File Diff"
 msgstr "%s ファイル差分"
 
--- a/kallithea/i18n/lb/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/lb/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -13,18 +13,15 @@
 msgid "There are no changesets yet"
 msgstr "Et sinn nach keng Ännerungen do"
 
+msgid "SSH access is disabled."
+msgstr "SSH Accès ass ausgeschalt."
+
 msgid "None"
 msgstr "Keng"
 
 msgid "(closed)"
 msgstr "(Zou)"
 
-msgid "Show whitespace"
-msgstr "Leerzeechen uweisen"
-
-msgid "Ignore whitespace"
-msgstr "Leerzechen ignoréieren"
-
 msgid "No permission to change status"
 msgstr "Keng Erlabnis fir den Status ze änneren"
 
@@ -163,9 +160,6 @@
 msgid "Please enter email address"
 msgstr "Wannechgelift E-Mail-Adress afügen"
 
-msgid "SSH access is disabled."
-msgstr "SSH Accès ass ausgeschalt."
-
 msgid "Binary file"
 msgstr "Binär Datei"
 
@@ -214,6 +208,9 @@
 msgid "No changesets yet"
 msgstr "Nach keng Ännerungen do"
 
+msgid "Ignore whitespace"
+msgstr "Leerzechen ignoréieren"
+
 msgid "There are no forks yet"
 msgstr "Et sinn nach keng Ofzweigungen do"
 
--- a/kallithea/i18n/nb_NO/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/nb_NO/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,15 +19,6 @@
 msgid "(closed)"
 msgstr "(lukket)"
 
-msgid "Show whitespace"
-msgstr "Vis blanktegn"
-
-msgid "Ignore whitespace"
-msgstr "Ignorer blanktegn"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Øk diff-bindeleddsinformasjon til %(num)s linjer"
-
 msgid "Successfully deleted pull request %s"
 msgstr "Slettet flettingsforespørsel %s"
 
@@ -436,6 +427,9 @@
 msgid "Binary file"
 msgstr "Binærfil"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Øk diff-bindeleddsinformasjon til %(num)s linjer"
+
 msgid "Fork name %s"
 msgstr "Forgreningsnavn %s"
 
@@ -517,9 +511,6 @@
 msgid "just now"
 msgstr "akkurat nå"
 
-msgid "on line %s"
-msgstr "på linje %s"
-
 msgid "top level"
 msgstr "toppnivå"
 
@@ -614,9 +605,12 @@
 msgid "Password"
 msgstr "Passord"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Glemt passordet ditt?"
 
+msgid "Don't have an account?"
+msgstr "Mangler du konto?"
+
 msgid "Password Reset"
 msgstr "Passordstilbakestilling"
 
@@ -1141,9 +1135,6 @@
 msgid "Forgot password?"
 msgstr "Glemt passordet?"
 
-msgid "Don't have an account?"
-msgstr "Mangler du konto?"
-
 msgid "Log Out"
 msgstr "Logg ut"
 
@@ -1170,3 +1161,6 @@
 
 msgid "%s comments"
 msgstr "%s kommentarer"
+
+msgid "Ignore whitespace"
+msgstr "Ignorer blanktegn"
--- a/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,12 +19,6 @@
 msgid "(closed)"
 msgstr "(gesloten)"
 
-msgid "Show whitespace"
-msgstr "Toon witruimtes"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Vergroot de diff context tot %(num)s lijnen"
-
 msgid "No permission to change status"
 msgstr "Geen toestemming om de status te veranderen"
 
@@ -203,6 +197,9 @@
 msgid "An error occurred during creation of field: %r"
 msgstr "Er is een fout opgetreden tijdens het aanmaken van veld: %r"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Vergroot de diff context tot %(num)s lijnen"
+
 msgid "Changeset %s not found"
 msgstr "Changeset %s werd niet gevonden"
 
--- a/kallithea/i18n/pl/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/pl/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -11,21 +11,27 @@
 "Plural-Forms: nplurals=3; plural=n==1 ? 0 : n%10>=2 && n%10<=4 && (n"
 "%100<10 || n%100>=20) ? 1 : 2;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr ""
+"Wykryto wyciek tokenu CSRF — wszystkie tokeny formularza zostały "
+"unieważnione"
+
 msgid "There are no changesets yet"
 msgstr "Brak zestawienia zmian"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Zmiany dla %s %s nie zostały znalezione w %s"
+
+msgid "SSH access is disabled."
+msgstr "Dostęp SSH jest wyłączony."
+
 msgid "None"
 msgstr "Brak"
 
 msgid "(closed)"
 msgstr "(zamknięty)"
 
-msgid "Show whitespace"
-msgstr "pokazuj spacje"
-
-msgid "Ignore whitespace"
-msgstr "Ignoruj pokazywanie spacji"
-
 msgid "Successfully deleted pull request %s"
 msgstr ""
 "Prośba o skasowanie połączenia gałęzi %s została wykonana prawidłowo"
@@ -170,9 +176,6 @@
 msgid "Successfully updated password"
 msgstr "Pomyślnie zaktualizowano hasło"
 
-msgid "Invalid reviewer \"%s\" specified"
-msgstr "Podano nieprawidłowego recenzenta \"%\""
-
 msgid "%s (closed)"
 msgstr "%s (zamknięty)"
 
@@ -444,12 +447,6 @@
 msgid "Updated VCS settings"
 msgstr "Aktualizacja ustawień VCS"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Nie można włączyć obsługi hgsubversion. Brak biblioteki „hgsubversion”"
-
 msgid "Error occurred while updating application settings"
 msgstr "Wystąpił błąd podczas aktualizacji ustawień aplikacji"
 
@@ -551,18 +548,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Musisz być zalogowany, żeby oglądać stronę"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr ""
-"Wykryto wyciek tokenu CSRF — wszystkie tokeny formularza zostały "
-"unieważnione"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Zmiany dla %s %s nie zostały znalezione w %s"
-
-msgid "SSH access is disabled."
-msgstr "Dostęp SSH jest wyłączony."
-
 msgid "Binary file"
 msgstr "Plik binarny"
 
@@ -683,41 +668,9 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"%s repozytorium nie jest mapowane do db może zostało utworzone lub "
-"zmienione z systemie plików proszę uruchomić aplikację ponownie, aby "
-"ponownie przeskanować repozytoria"
-
 msgid "SSH key is missing"
 msgstr "Brak klucza SSH"
 
-msgid ""
-"Incorrect SSH key - it must have both a key type and a base64 part, like "
-"'ssh-rsa ASRNeaZu4FA...xlJp='"
-msgstr ""
-"Nieprawidłowy klucz SSH - musi mieć zarówno typ, jak i część kodowaną "
-"base64, na przykład „ssh-rsa ASRNeaZu4FA ... xlJp=”"
-
-msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
-msgstr ""
-"Nieprawidłowy klucz SSH - musi zaczynać się od 'ssh-(rsa | dss | ed25519)'"
-
-msgid "Incorrect SSH key - unexpected characters in base64 part %r"
-msgstr ""
-"Nieprawidłowy klucz SSH - nieoczekiwane znaki w części kodowanej base64 %r"
-
-msgid "Incorrect SSH key - failed to decode base64 part %r"
-msgstr "Nieprawidłowy klucz SSH - nie udało się zdekodować części base64 %r"
-
-msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
-msgstr ""
-"Nieprawidłowy klucz SSH - część kodowana base64 nie jest %r jak podano, "
-"ale %r"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d rok"
@@ -769,12 +722,6 @@
 msgid "just now"
 msgstr "przed chwilą"
 
-msgid "on line %s"
-msgstr "widziany %s"
-
-msgid "[Mention]"
-msgstr "[Wymieniony]"
-
 msgid "top level"
 msgstr "najwyższy poziom"
 
@@ -822,13 +769,6 @@
 msgstr ""
 "Domyślny użytkownik ma dostęp administracyjny do nowych grup użytkowników"
 
-msgid "Only admins can create repository groups"
-msgstr "Tylko administratorzy mogą tworzyć grupy repozytoriów"
-
-msgid "Non-admins can create repository groups"
-msgstr ""
-"Użytkownicy bez uprawnień administratora mogą tworzyć grupy repozytoriów"
-
 msgid "Only admins can create user groups"
 msgstr "Tylko administratorzy mogą tworzyć grupy użytkowników"
 
@@ -880,13 +820,6 @@
 msgid "Name must not contain only digits"
 msgstr "Nazwa nie może zawierać samych cyfr"
 
-msgid ""
-"[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
-"%(branch)s"
-msgstr ""
-"[Komentarz] %(repo_name)s zmiana %(short_id)s \"%(message_short)s\" w "
-"%(branch)s"
-
 msgid "New user %(new_username)s registered"
 msgstr "Użytkownik %(new_username)s zarejestrował się"
 
@@ -902,9 +835,6 @@
 msgid "SSH key with fingerprint %r found"
 msgstr "Znaleziono klucz SSH z odciskiem palca %r"
 
-msgid "New user registration"
-msgstr "nowy użytkownik się zarejestrował"
-
 msgid ""
 "You can't remove this user since it is crucial for the entire application"
 msgstr ""
@@ -989,13 +919,6 @@
 msgid "Invalid repository URL"
 msgstr "Nieprawidłowy adres URL repozytorium"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
-msgstr ""
-"Nieprawidłowy adres URL repozytorium. Musi to być prawidłowy adres URL "
-"typu http, https, ssh, svn + http lub svn + https"
-
 msgid "Fork has to be the same type as parent"
 msgstr "Fork musi być tego samego typu, jak rodzic"
 
@@ -1088,10 +1011,10 @@
 msgid "Stay logged in after browser restart"
 msgstr "Pozostań zalogowany po ponownym uruchomieniu przeglądarki"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Zapomniałeś hasła?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "Nie masz konta?"
 
 msgid "Sign In"
@@ -1681,9 +1604,6 @@
 msgid "Enable largefiles extension"
 msgstr "Rozszerzenia dużych plików"
 
-msgid "Enable hgsubversion extension"
-msgstr "Rozszerzenia hgsubversion"
-
 msgid ""
 "Click to unlock. You must restart Kallithea in order to make this setting "
 "take effect."
@@ -2034,6 +1954,9 @@
 msgid "File diff"
 msgstr "Pliki różnic"
 
+msgid "Ignore whitespace"
+msgstr "Ignoruj pokazywanie spacji"
+
 msgid "%s File Diff"
 msgstr "%s Pliki różnic"
 
--- a/kallithea/i18n/pt/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/pt/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,12 +19,6 @@
 msgid "(closed)"
 msgstr "(fechado)"
 
-msgid "Show whitespace"
-msgstr "Mostrar espaços em branco"
-
-msgid "Ignore whitespace"
-msgstr "Ignorar espaços em branco"
-
 msgid ""
 "The request could not be understood by the server due to malformed syntax."
 msgstr ""
@@ -509,15 +503,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"O repositório %s não está mapeado ao BD. Talvez ele tenha sido criado ou "
-"renomeado a partir do sistema de ficheiros. Por favor, execute a "
-"aplicação outra vez para varrer novamente por repositórios"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d ano"
@@ -563,12 +548,6 @@
 msgid "just now"
 msgstr "agora há pouco"
 
-msgid "on line %s"
-msgstr "na linha %s"
-
-msgid "[Mention]"
-msgstr "[Menção]"
-
 msgid "top level"
 msgstr "nível superior"
 
@@ -596,9 +575,6 @@
 msgid "latest tip"
 msgstr "tip mais recente"
 
-msgid "New user registration"
-msgstr "Novo registo de utilizador"
-
 msgid "Password reset link"
 msgstr "Ligação para trocar palavra-passe"
 
@@ -718,11 +694,11 @@
 msgid "Password"
 msgstr "Palavra-passe"
 
-msgid "Forgot your password ?"
-msgstr "Esqueceu sua palavra-passe ?"
+msgid "Forgot your password?"
+msgstr "Esqueceu sua palavra-passe?"
 
-msgid "Don't have an account ?"
-msgstr "Não possui uma conta ?"
+msgid "Don't have an account?"
+msgstr "Não possui uma conta?"
 
 msgid "Sign In"
 msgstr "Entrar"
@@ -1011,9 +987,6 @@
 msgid "Enable largefiles extension"
 msgstr "Ativar extensão largefiles"
 
-msgid "Enable hgsubversion extension"
-msgstr "Ativar extensão hgsubversion"
-
 msgid ""
 "Click to unlock. You must restart Kallithea in order to make this setting "
 "take effect."
@@ -1355,6 +1328,9 @@
 msgid "File diff"
 msgstr "Diff do ficheiro"
 
+msgid "Ignore whitespace"
+msgstr "Ignorar espaços em branco"
+
 msgid "%s File Diff"
 msgstr "%s Diff de Ficheiro"
 
--- a/kallithea/i18n/pt_BR/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/pt_BR/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,12 +19,6 @@
 msgid "(closed)"
 msgstr "(fechado)"
 
-msgid "Show whitespace"
-msgstr "Mostrar espaços em branco"
-
-msgid "Ignore whitespace"
-msgstr "Ignorar espaços em branco"
-
 msgid ""
 "The request could not be understood by the server due to malformed syntax."
 msgstr ""
@@ -509,15 +503,6 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"O repositório %s não está mapeado ao BD. Talvez ele tenha sido criado ou "
-"renomeado a partir do sistema de arquivos. Por favor, execute a aplicação "
-"outra vez para varrer novamente por repositórios"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d ano"
@@ -563,12 +548,6 @@
 msgid "just now"
 msgstr "agora há pouco"
 
-msgid "on line %s"
-msgstr "na linha %s"
-
-msgid "[Mention]"
-msgstr "[Menção]"
-
 msgid "top level"
 msgstr "nível superior"
 
@@ -596,9 +575,6 @@
 msgid "latest tip"
 msgstr "tip mais recente"
 
-msgid "New user registration"
-msgstr "Novo registro de usuário"
-
 msgid "Password reset link"
 msgstr "Link para trocar senha"
 
@@ -718,11 +694,11 @@
 msgid "Password"
 msgstr "Senha"
 
-msgid "Forgot your password ?"
-msgstr "Esqueceu sua senha ?"
+msgid "Forgot your password?"
+msgstr "Esqueceu sua senha?"
 
-msgid "Don't have an account ?"
-msgstr "Não possui uma conta ?"
+msgid "Don't have an account?"
+msgstr "Não possui uma conta?"
 
 msgid "Sign In"
 msgstr "Entrar"
@@ -1009,9 +985,6 @@
 msgid "Enable largefiles extension"
 msgstr "Habilitar extensão largefiles"
 
-msgid "Enable hgsubversion extension"
-msgstr "Habilitar extensão hgsubversion"
-
 msgid ""
 "Click to unlock. You must restart Kallithea in order to make this setting "
 "take effect."
@@ -1353,6 +1326,9 @@
 msgid "File diff"
 msgstr "Diff do arquivo"
 
+msgid "Ignore whitespace"
+msgstr "Ignorar espaços em branco"
+
 msgid "%s File Diff"
 msgstr "%s Diff de Arquivo"
 
--- a/kallithea/i18n/ru/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/ru/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -11,24 +11,28 @@
 "Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
 "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr "Обнаружена утечка CSRF-токена — истёк срок действия токенов форм"
+
+msgid "Repository not found in the filesystem"
+msgstr "Репозиторий не найден на файловой системе"
+
 msgid "There are no changesets yet"
 msgstr "Наборы изменений отсутствуют"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Набор изменений для %s %s не найден в %s"
+
+msgid "SSH access is disabled."
+msgstr "Доступ по SSH отключен."
+
 msgid "None"
 msgstr "Ничего"
 
 msgid "(closed)"
 msgstr "(закрыто)"
 
-msgid "Show whitespace"
-msgstr "Отображать пробелы"
-
-msgid "Ignore whitespace"
-msgstr "Игнорировать пробелы"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Увеличить контекст до %(num)s строк"
-
 msgid "No permission to change status"
 msgstr "Недостаточно привилегий для изменения статуса"
 
@@ -530,13 +534,6 @@
 msgid "Updated VCS settings"
 msgstr "Обновлены настройки VCS"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Невозможно включить поддержку hgsubversion. Библиотека «hgsubversion» "
-"отсутствует"
-
 msgid "Error occurred while updating application settings"
 msgstr "Произошла ошибка при обновлении настроек приложения"
 
@@ -564,11 +561,6 @@
 msgid "Hook already exists"
 msgstr "Хук уже существует"
 
-msgid "Builtin hooks are read-only. Please use another hook name."
-msgstr ""
-"Встроенные хуки предназначены только для чтения. Пожалуйста, используйте "
-"другое имя."
-
 msgid "Added new hook"
 msgstr "Добавлена новая ловушка"
 
@@ -646,19 +638,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Страница доступна только авторизованным пользователям"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr "Обнаружена утечка CSRF-токена — истёк срок действия токенов форм"
-
-msgid "Repository not found in the filesystem"
-msgstr "Репозиторий не найден на файловой системе"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Набор изменений для %s %s не найден в %s"
-
-msgid "SSH access is disabled."
-msgstr "Доступ по SSH отключен."
-
 msgid "Binary file"
 msgstr "Двоичный файл"
 
@@ -671,6 +650,9 @@
 msgid "No changes detected"
 msgstr "Изменений не обнаружено"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Увеличить контекст до %(num)s строк"
+
 msgid "Deleted branch: %s"
 msgstr "Удалена ветка: %s"
 
@@ -782,39 +764,9 @@
 msgid "chmod"
 msgstr "chmod"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"Репозиторий %s отсутствует в базе данных; возможно, он был создан или "
-"переименован из файловой системы. Пожалуйста, перезапустите приложение "
-"для сканирования репозиториев"
-
 msgid "SSH key is missing"
 msgstr "Отсутствует ключ SSH"
 
-msgid ""
-"Incorrect SSH key - it must have both a key type and a base64 part, like "
-"'ssh-rsa ASRNeaZu4FA...xlJp='"
-msgstr ""
-"Некорректный ключ SSH — должен присутствовать тип ключа и код base64, "
-"например 'ssh-rsa ASRNeaZu4FA...xlJp='"
-
-msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
-msgstr ""
-"Некорректный ключ SSH — он должен начинаться с 'ssh-(rsa|dss|ed25519)'"
-
-msgid "Incorrect SSH key - unexpected characters in base64 part %r"
-msgstr ""
-"Некорректный ключ SSH — присутствуют некорректные символы в коде base64 %r"
-
-msgid "Incorrect SSH key - failed to decode base64 part %r"
-msgstr "Некорректный ключ SSH — ошибка декодирования кода base64 %r"
-
-msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
-msgstr "Некорректный ключ SSH — код base64 соответствует не %r, а %r"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d год"
@@ -866,12 +818,6 @@
 msgid "just now"
 msgstr "только что"
 
-msgid "on line %s"
-msgstr "на строке %s"
-
-msgid "[Mention]"
-msgstr "[Упоминание]"
-
 msgid "top level"
 msgstr "верхний уровень"
 
@@ -934,12 +880,6 @@
 "Неавторизованные пользователи имеют права администратора к новым групппам "
 "пользователей"
 
-msgid "Only admins can create repository groups"
-msgstr "Только администраторы могут создавать группы репозиториев"
-
-msgid "Non-admins can create repository groups"
-msgstr "Группы репозиториев могут создаваться любыми пользователями"
-
 msgid "Only admins can create user groups"
 msgstr "Группы пользователей могут создаваться только администраторами"
 
@@ -952,16 +892,6 @@
 msgid "Non-admins can create top level repositories"
 msgstr "Любой пользователь может создавать репозитории верхнего уровня"
 
-msgid ""
-"Repository creation enabled with write permission to a repository group"
-msgstr ""
-"Создание репозиториев доступно с правом на запись в группу репозиториев"
-
-msgid ""
-"Repository creation disabled with write permission to a repository group"
-msgstr ""
-"Создание репозиториев недоступно с правом на запись в группу репозиториев"
-
 msgid "Only admins can fork repositories"
 msgstr "Форки репозиториев могут создаваться только администраторами"
 
@@ -1004,13 +934,6 @@
 msgid "Name must not contain only digits"
 msgstr "Имя не может состоять только из цифр"
 
-msgid ""
-"[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
-"%(branch)s"
-msgstr ""
-"[Комментарий] к набору изменений %(short_id)s «%(message_short)s» "
-"репозитория %(repo_name)s в %(branch)s"
-
 msgid "New user %(new_username)s registered"
 msgstr "Новый пользователь \"%(new_username)s\" зарегистрирован"
 
@@ -1031,11 +954,6 @@
 msgid "Closing"
 msgstr "Закрыт"
 
-msgid ""
-"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
-msgstr ""
-"%(user)s просит вас рассмотреть pull-запрос %(pr_nice_id)s: %(pr_title)s"
-
 msgid "Cannot create empty pull request"
 msgstr "Невозможно создать пустой pull-запрос"
 
@@ -1082,9 +1000,6 @@
 msgid "SSH key with fingerprint %r found"
 msgstr "Найден ключ SSH с отпечатком %r"
 
-msgid "New user registration"
-msgstr "Регистрация нового пользователя"
-
 msgid ""
 "You can't remove this user since it is crucial for the entire application"
 msgstr ""
@@ -1197,13 +1112,6 @@
 msgid "Invalid repository URL"
 msgstr "Недопустимый URL репозитория"
 
-msgid ""
-"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
-"svn+https URL"
-msgstr ""
-"Недопустимый URL репозитория. Требуется корректный http, https, ssh, svn"
-"+http или svn+https URL"
-
 msgid "Fork has to be the same type as parent"
 msgstr "Форк будет иметь тот же тип, что и родительский"
 
@@ -1301,10 +1209,10 @@
 msgid "Stay logged in after browser restart"
 msgstr "Оставаться авторизованным"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "Забыли пароль?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "Нет аккаунта?"
 
 msgid "Sign In"
@@ -1782,25 +1690,6 @@
 "Включите, чтобы разрешить всем пользователям создавать репозитории на "
 "верхнем уровне."
 
-msgid ""
-"Note: This will also give all users API access to create repositories "
-"everywhere. That might change in future versions."
-msgstr ""
-"Внимание: это также позволит всем пользователям с помощью API создавать "
-"репозитории где угодно. Это может измениться в будущих версиях."
-
-msgid "Repository creation with group write access"
-msgstr "Создание репозитория с правом записи в группы"
-
-msgid ""
-"With this, write permission to a repository group allows creating "
-"repositories inside that group. Without this, group write permissions "
-"mean nothing."
-msgstr ""
-"С этой опцией, право записи в группу репозиториев позволяет создавать "
-"репозитории в этой группе. Без неё, право записи в группу не имеет "
-"действия."
-
 msgid "User group creation"
 msgstr "Создание групп пользователей"
 
@@ -2229,12 +2118,6 @@
 msgid "Save Settings"
 msgstr "Сохранить настройки"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "Встроенные хуки Mercurial (только чтение)"
-
-msgid "Custom Hooks"
-msgstr "Пользовательские хуки"
-
 msgid ""
 "Hooks can be used to trigger actions on certain events such as push / "
 "pull. They can trigger Python functions or external applications."
@@ -2271,25 +2154,6 @@
 msgid "Install Git hooks"
 msgstr "Установить хуки Git"
 
-msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
-msgstr ""
-"Проверяет установку Git хуков от Kallithea у каждого репозитория. Текущие "
-"хуки будут обновлены до последней версии."
-
-msgid "Overwrite existing Git hooks"
-msgstr "Перезаписать существующие хуки"
-
-msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
-msgstr ""
-"Перезаписывает все существующие хуки при установке хуков Git, даже если "
-"они не поставляются с Kallithea. ПРЕДУПРЕЖДЕНИЕ: это действие уничтожит "
-"любые Git хуки, которые могли быть созданы вручную!"
-
 msgid "Rescan Repositories"
 msgstr "Пересканировать репозитории"
 
@@ -2345,16 +2209,6 @@
 msgid "Enable largefiles extension"
 msgstr "Включить поддержку больших файлов"
 
-msgid "Enable hgsubversion extension"
-msgstr "Включить поддержку hgsubversion"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"Требует наличия библиотеки hgsubversion. Включает клонирование удалённых "
-"репозиториев Subversion с последующим конвертированием в Mercurial."
-
 msgid "Location of repositories"
 msgstr "Местонахождение репозиториев"
 
@@ -2711,9 +2565,6 @@
 msgid "Forgot password?"
 msgstr "Забыли пароль?"
 
-msgid "Don't have an account?"
-msgstr "Нет аккаунта?"
-
 msgid "Log Out"
 msgstr "Выход"
 
@@ -2819,8 +2670,8 @@
 msgid "Failed to revoke permission"
 msgstr "Не удалось отозвать привилегии"
 
-msgid "Confirm to revoke permission for {0}: {1} ?"
-msgstr "Подтвердите удаление привилегии для {0}: {1} ?"
+msgid "Confirm to revoke permission for {0}: {1}?"
+msgstr "Подтвердите удаление привилегии для {0}: {1}?"
 
 msgid "Select changeset"
 msgstr "Выбрать набор изменений"
@@ -3240,6 +3091,9 @@
 msgid "File diff"
 msgstr "Сравнение файлов"
 
+msgid "Ignore whitespace"
+msgstr "Игнорировать пробелы"
+
 msgid "%s File Diff"
 msgstr "Сравнение файла %s"
 
--- a/kallithea/i18n/sk/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/sk/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -16,12 +16,6 @@
 msgid "(closed)"
 msgstr "(zatvorené)"
 
-msgid "Show whitespace"
-msgstr "Ukázať medzery"
-
-msgid "Ignore whitespace"
-msgstr "Ignorovať medzery"
-
 msgid "Such revision does not exist for this repository"
 msgstr "Taká revízia neexistuje"
 
@@ -136,6 +130,9 @@
 msgid "Delete this Repository"
 msgstr "Vymazať tento repozitár"
 
+msgid "Ignore whitespace"
+msgstr "Ignorovať medzery"
+
 msgid "Cancel Changes"
 msgstr "Zrušiť zmeny"
 
--- a/kallithea/i18n/tr/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/tr/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -15,6 +15,3 @@
 
 msgid "(closed)"
 msgstr "(kapalı)"
-
-msgid "Show whitespace"
-msgstr "Boşlukları göster"
--- a/kallithea/i18n/uk/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/uk/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -11,24 +11,25 @@
 "Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
 "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
 
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr "Виявлено витік токенів CSRF - всі маркери форми минули"
+
+msgid "Repository not found in the filesystem"
+msgstr "Репозиторій не знайдено у файловій системі"
+
 msgid "There are no changesets yet"
 msgstr "Наборів змін немає"
 
+msgid "Changeset for %s %s not found in %s"
+msgstr "Набір змін для %s %s не знайдено в %s"
+
 msgid "None"
 msgstr "Нічого"
 
 msgid "(closed)"
 msgstr "(закрито)"
 
-msgid "Show whitespace"
-msgstr "Відображати пробіли"
-
-msgid "Ignore whitespace"
-msgstr "Ігнорувати пробіли"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "Збільшити відмінність контексту для %(num)s рядків"
-
 msgid "No permission to change status"
 msgstr "У вас немає дозволу змінювати статус"
 
@@ -515,13 +516,6 @@
 msgid "Updated VCS settings"
 msgstr "Оновлені налаштування VCS"
 
-msgid ""
-"Unable to activate hgsubversion support. The \"hgsubversion\" library is "
-"missing"
-msgstr ""
-"Не вдається активувати підтримку hgsubversion. Бібліотека \"hgsubversion"
-"\" відсутня"
-
 msgid "Error occurred while updating application settings"
 msgstr "Під час оновлення параметрів застосунку сталася помилка"
 
@@ -549,11 +543,6 @@
 msgid "Hook already exists"
 msgstr "Hook вже існує"
 
-msgid "Builtin hooks are read-only. Please use another hook name."
-msgstr ""
-"Вбудовані hooks доступні лише для читання. Будь ласка, використовуйте "
-"інше ім'я hook."
-
 msgid "Added new hook"
 msgstr "Додано новий hook"
 
@@ -629,16 +618,6 @@
 msgid "You need to be signed in to view this page"
 msgstr "Ви повинні бути зареєстровані для перегляду цієї сторінки"
 
-msgid ""
-"CSRF token leak has been detected - all form tokens have been expired"
-msgstr "Виявлено витік токенів CSRF - всі маркери форми минули"
-
-msgid "Repository not found in the filesystem"
-msgstr "Репозиторій не знайдено у файловій системі"
-
-msgid "Changeset for %s %s not found in %s"
-msgstr "Набір змін для %s %s не знайдено в %s"
-
 msgid "Binary file"
 msgstr "Двійковий файл"
 
@@ -651,6 +630,9 @@
 msgid "No changes detected"
 msgstr "Не виявлено змін"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "Збільшити відмінність контексту для %(num)s рядків"
+
 msgid "Deleted branch: %s"
 msgstr "Видалено гілку: %s"
 
@@ -765,12 +747,6 @@
 msgid "just now"
 msgstr "прямо зараз"
 
-msgid "on line %s"
-msgstr "в рядку %s"
-
-msgid "[Mention]"
-msgstr "[Згадування]"
-
 msgid "top level"
 msgstr "верхній рівень"
 
@@ -796,6 +772,9 @@
 msgstr ""
 "Користувач за замовчуванням не має доступу до нових груп репозиторіїв"
 
+msgid "Don't have an account?"
+msgstr "Не маєте облікового запису?"
+
 msgid "Create Public Gist"
 msgstr "Створити публічний GIST"
 
@@ -962,9 +941,6 @@
 msgstr ""
 "Дозволи для користувача за замовчуванням для нових груп користувачів."
 
-msgid "Repository creation with group write access"
-msgstr "Створення сховища з доступом до групового записування"
-
 msgid "User group creation"
 msgstr "Створення групи користувачів"
 
@@ -1321,12 +1297,6 @@
 msgid "Save Settings"
 msgstr "Зберегти налаштування"
 
-msgid "Built-in Mercurial Hooks (Read-Only)"
-msgstr "Вбудовані хуки Mercurial (лише для читання)"
-
-msgid "Custom Hooks"
-msgstr "Користувацькі хуки"
-
 msgid "Failed to remove hook"
 msgstr "Не вдалося видалити хук"
 
@@ -1355,25 +1325,6 @@
 msgid "Install Git hooks"
 msgstr "Встановити Git хуки"
 
-msgid ""
-"Verify if Kallithea's Git hooks are installed for each repository. "
-"Current hooks will be updated to the latest version."
-msgstr ""
-"Перевірити, чи є в Git хуки для кожного репозиторію. Поточні хуки буде "
-"оновлено до останньої версії."
-
-msgid "Overwrite existing Git hooks"
-msgstr "Перезаписати існуючі хуки Git"
-
-msgid ""
-"If installing Git hooks, overwrite any existing hooks, even if they do "
-"not seem to come from Kallithea. WARNING: This operation will destroy any "
-"custom git hooks you may have deployed by hand!"
-msgstr ""
-"При установці Git хуків, перезаписати будь-які існуючі хуки, навіть якщо "
-"вони, здається, не приходять з Каллітея. Увага: ця операція знищить будь-"
-"які користувацькі хуки Git які ви, можливо, розгорнули вручну!"
-
 msgid "Rescan Repositories"
 msgstr "Пересканувати Репозиторії"
 
@@ -1429,16 +1380,6 @@
 msgid "Enable largefiles extension"
 msgstr "Увімкнути розширення largefiles"
 
-msgid "Enable hgsubversion extension"
-msgstr "Увімкнути розширення hgsubversion"
-
-msgid ""
-"Requires hgsubversion library to be installed. Enables cloning of remote "
-"Subversion repositories while converting them to Mercurial."
-msgstr ""
-"Потрібна установка бібліотеки hgsubversion. Дозволяє клонувати віддалені "
-"сховища Subversion під час перетворення їх у Mercurial."
-
 msgid "Location of repositories"
 msgstr "Розташування репозиторіїв"
 
@@ -1562,9 +1503,6 @@
 msgid "Forgot password?"
 msgstr "Забули пароль?"
 
-msgid "Don't have an account?"
-msgstr "Не маєте облікового запису?"
-
 msgid "Log Out"
 msgstr "Вийти"
 
@@ -1700,6 +1638,9 @@
 msgid "Status change:"
 msgstr "Зміна статусу:"
 
+msgid "Ignore whitespace"
+msgstr "Ігнорувати пробіли"
+
 msgid "files"
 msgstr "файли"
 
--- a/kallithea/i18n/zh_CN/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/zh_CN/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -19,15 +19,6 @@
 msgid "(closed)"
 msgstr "(已关闭)"
 
-msgid "Show whitespace"
-msgstr "显示空白"
-
-msgid "Ignore whitespace"
-msgstr "忽略空白"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "增加差异上下文到 %(num)s 行"
-
 msgid "Such revision does not exist for this repository"
 msgstr "在此代码库内,此修改并不存在"
 
@@ -398,6 +389,9 @@
 msgid "No changes detected"
 msgstr "未发现差异"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "增加差异上下文到 %(num)s 行"
+
 msgid "Deleted branch: %s"
 msgstr "已经删除分支%s"
 
@@ -473,14 +467,6 @@
 msgid "No files"
 msgstr "无文件"
 
-msgid ""
-"%s repository is not mapped to db perhaps it was created or renamed from "
-"the filesystem please run the application again in order to rescan "
-"repositories"
-msgstr ""
-"版本库%s没有映射到数据库,可能是从文件系统创建或者重命名,请重启Kallithea"
-"以重新扫描版本库"
-
 msgid "%d year"
 msgid_plural "%d years"
 msgstr[0] "%d年"
@@ -520,12 +506,6 @@
 msgid "just now"
 msgstr "刚才"
 
-msgid "on line %s"
-msgstr "在%s行"
-
-msgid "[Mention]"
-msgstr "[提及]"
-
 msgid "Kallithea Administrator"
 msgstr "Kallithea 管理员"
 
@@ -615,10 +595,10 @@
 msgid "Password"
 msgstr "密码"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "忘记了密码?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "还没有帐号?"
 
 msgid "Sign In"
@@ -1009,6 +989,9 @@
 msgid "File diff"
 msgstr "文件差异"
 
+msgid "Ignore whitespace"
+msgstr "忽略空白"
+
 msgid "Location"
 msgstr "位置"
 
--- a/kallithea/i18n/zh_TW/LC_MESSAGES/kallithea.po	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/i18n/zh_TW/LC_MESSAGES/kallithea.po	Thu May 27 21:27:37 2021 +0200
@@ -16,15 +16,6 @@
 msgid "(closed)"
 msgstr "(已關閉)"
 
-msgid "Show whitespace"
-msgstr "顯示空格"
-
-msgid "Ignore whitespace"
-msgstr "忽略空格"
-
-msgid "Increase diff context to %(num)s lines"
-msgstr "增加 diff 上下文至 %(num)s 行"
-
 msgid "No response"
 msgstr "未回應"
 
@@ -136,6 +127,9 @@
 msgid "No changes detected"
 msgstr "尚未有任何變更"
 
+msgid "Increase diff context to %(num)s lines"
+msgstr "增加 diff 上下文至 %(num)s 行"
+
 msgid "and"
 msgstr "和"
 
@@ -206,10 +200,10 @@
 msgid "Password"
 msgstr "密碼"
 
-msgid "Forgot your password ?"
+msgid "Forgot your password?"
 msgstr "忘記您的密碼?"
 
-msgid "Don't have an account ?"
+msgid "Don't have an account?"
 msgstr "沒有帳號?"
 
 msgid "Sign In"
@@ -389,6 +383,9 @@
 msgid "File diff"
 msgstr "檔案差異"
 
+msgid "Ignore whitespace"
+msgstr "忽略空格"
+
 msgid "Location"
 msgstr "位置"
 
--- a/kallithea/lib/annotate.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/annotate.py	Thu May 27 21:27:37 2021 +0200
@@ -28,6 +28,7 @@
 from pygments import highlight
 from pygments.formatters import HtmlFormatter
 
+from kallithea.lib.pygmentsutils import get_custom_lexer
 from kallithea.lib.vcs.exceptions import VCSError
 from kallithea.lib.vcs.nodes import FileNode
 from kallithea.lib.vcs.utils import safe_str
@@ -48,7 +49,6 @@
     :param headers: dictionary with headers (keys are whats in ``order``
       parameter)
     """
-    from kallithea.lib.pygmentsutils import get_custom_lexer
     options['linenos'] = True
     formatter = AnnotateHtmlFormatter(filenode=filenode,
         annotate_from_changeset_func=annotate_from_changeset_func, order=order,
--- a/kallithea/lib/auth.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/auth.py	Thu May 27 21:27:37 2021 +0200
@@ -24,13 +24,9 @@
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """
-import hashlib
 import itertools
 import logging
-import os
-import string
 
-import bcrypt
 import ipaddr
 from decorator import decorator
 from sqlalchemy.orm import joinedload
@@ -40,309 +36,29 @@
 from webob.exc import HTTPForbidden, HTTPFound
 
 import kallithea
-from kallithea.config.routing import url
+from kallithea.lib import webutils
 from kallithea.lib.utils import get_repo_group_slug, get_repo_slug, get_user_group_slug
-from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
 from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.model.db import (Permission, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm,
-                                UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
-from kallithea.model.meta import Session
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.user import UserModel
 
 
 log = logging.getLogger(__name__)
 
 
-class PasswordGenerator(object):
-    """
-    This is a simple class for generating password from different sets of
-    characters
-    usage::
-
-        passwd_gen = PasswordGenerator()
-        #print 8-letter password containing only big and small letters
-            of alphabet
-        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
-    """
-    ALPHABETS_NUM = r'''1234567890'''
-    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
-    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
-    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
-    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
-        + ALPHABETS_NUM + ALPHABETS_SPECIAL
-    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
-    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
-    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
-    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
-
-    def gen_password(self, length, alphabet=ALPHABETS_FULL):
-        assert len(alphabet) <= 256, alphabet
-        l = []
-        while len(l) < length:
-            i = ord(os.urandom(1))
-            if i < len(alphabet):
-                l.append(alphabet[i])
-        return ''.join(l)
-
-
-def get_crypt_password(password):
-    """
-    Cryptographic function used for bcrypt password hashing.
-
-    :param password: password to hash
-    """
-    return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10)))
-
-
-def check_password(password, hashed):
-    """
-    Checks password match the hashed value using bcrypt.
-    Remains backwards compatible and accept plain sha256 hashes which used to
-    be used on Windows.
-
-    :param password: password
-    :param hashed: password in hashed form
-    """
-    # sha256 hashes will always be 64 hex chars
-    # bcrypt hashes will always contain $ (and be shorter)
-    if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
-        return hashlib.sha256(password).hexdigest() == hashed
-    try:
-        return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
-    except ValueError as e:
-        # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
-        log.error('error from bcrypt checking password: %s', e)
-        return False
-    log.error('check_password failed - no method found for hash length %s', len(hashed))
-    return False
-
-
-def _cached_perms_data(user_id, user_is_admin):
-    RK = 'repositories'
-    GK = 'repositories_groups'
-    UK = 'user_groups'
-    GLOBAL = 'global'
-    PERM_WEIGHTS = Permission.PERM_WEIGHTS
-    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
-
-    def bump_permission(kind, key, new_perm):
-        """Add a new permission for kind and key.
-        Assuming the permissions are comparable, set the new permission if it
-        has higher weight, else drop it and keep the old permission.
-        """
-        cur_perm = permissions[kind][key]
-        new_perm_val = PERM_WEIGHTS[new_perm]
-        cur_perm_val = PERM_WEIGHTS[cur_perm]
-        if new_perm_val > cur_perm_val:
-            permissions[kind][key] = new_perm
-
-    #======================================================================
-    # fetch default permissions
-    #======================================================================
-    default_repo_perms = Permission.get_default_perms(kallithea.DEFAULT_USER_ID)
-    default_repo_groups_perms = Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID)
-    default_user_group_perms = Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID)
-
-    if user_is_admin:
-        #==================================================================
-        # admin users have all rights;
-        # based on default permissions, just set everything to admin
-        #==================================================================
-        permissions[GLOBAL].add('hg.admin')
-        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')
-
-        # repositories
-        for perm in default_repo_perms:
-            r_k = perm.repository.repo_name
-            p = 'repository.admin'
-            permissions[RK][r_k] = p
-
-        # repository groups
-        for perm in default_repo_groups_perms:
-            rg_k = perm.group.group_name
-            p = 'group.admin'
-            permissions[GK][rg_k] = p
-
-        # user groups
-        for perm in default_user_group_perms:
-            u_k = perm.user_group.users_group_name
-            p = 'usergroup.admin'
-            permissions[UK][u_k] = p
-        return permissions
-
-    #==================================================================
-    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
-    #==================================================================
-
-    # default global permissions taken from the default user
-    default_global_perms = UserToPerm.query() \
-        .filter(UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \
-        .options(joinedload(UserToPerm.permission))
-
-    for perm in default_global_perms:
-        permissions[GLOBAL].add(perm.permission.permission_name)
-
-    # defaults for repositories, taken from default user
-    for perm in default_repo_perms:
-        r_k = perm.repository.repo_name
-        if perm.repository.owner_id == user_id:
-            p = 'repository.admin'
-        elif perm.repository.private:
-            p = 'repository.none'
-        else:
-            p = perm.permission.permission_name
-        permissions[RK][r_k] = p
+PERM_WEIGHTS = db.Permission.PERM_WEIGHTS
 
-    # defaults for repository groups taken from default user permission
-    # on given group
-    for perm in default_repo_groups_perms:
-        rg_k = perm.group.group_name
-        p = perm.permission.permission_name
-        permissions[GK][rg_k] = p
-
-    # defaults for user groups taken from default user permission
-    # on given user group
-    for perm in default_user_group_perms:
-        u_k = perm.user_group.users_group_name
-        p = perm.permission.permission_name
-        permissions[UK][u_k] = p
-
-    #======================================================================
-    # !! Augment GLOBALS with user permissions if any found !!
-    #======================================================================
-
-    # USER GROUPS comes first
-    # user group global permissions
-    user_perms_from_users_groups = Session().query(UserGroupToPerm) \
-        .options(joinedload(UserGroupToPerm.permission)) \
-        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
-               UserGroupMember.users_group_id)) \
-        .filter(UserGroupMember.user_id == user_id) \
-        .join((UserGroup, UserGroupMember.users_group_id ==
-               UserGroup.users_group_id)) \
-        .filter(UserGroup.users_group_active == True) \
-        .order_by(UserGroupToPerm.users_group_id) \
-        .all()
-    # need to group here by groups since user can be in more than
-    # one group
-    _grouped = [[x, list(y)] for x, y in
-                itertools.groupby(user_perms_from_users_groups,
-                                  lambda x:x.users_group)]
-    for gr, perms in _grouped:
-        for perm in perms:
-            permissions[GLOBAL].add(perm.permission.permission_name)
-
-    # user specific global permissions
-    user_perms = Session().query(UserToPerm) \
-            .options(joinedload(UserToPerm.permission)) \
-            .filter(UserToPerm.user_id == user_id).all()
-
-    for perm in user_perms:
-        permissions[GLOBAL].add(perm.permission.permission_name)
-
-    # for each kind of global permissions, only keep the one with heighest weight
-    kind_max_perm = {}
-    for perm in sorted(permissions[GLOBAL], key=lambda n: PERM_WEIGHTS[n]):
-        kind = perm.rsplit('.', 1)[0]
-        kind_max_perm[kind] = perm
-    permissions[GLOBAL] = set(kind_max_perm.values())
-    ## END GLOBAL PERMISSIONS
-
-    #======================================================================
-    # !! PERMISSIONS FOR REPOSITORIES !!
-    #======================================================================
-    #======================================================================
-    # check if user is part of user groups for this repository and
-    # fill in his permission from it.
-    #======================================================================
-
-    # user group for repositories permissions
-    user_repo_perms_from_users_groups = \
-     Session().query(UserGroupRepoToPerm) \
-        .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
-               UserGroup.users_group_id)) \
-        .filter(UserGroup.users_group_active == True) \
-        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
-               UserGroupMember.users_group_id)) \
-        .filter(UserGroupMember.user_id == user_id) \
-        .options(joinedload(UserGroupRepoToPerm.repository)) \
-        .options(joinedload(UserGroupRepoToPerm.permission)) \
-        .all()
-
-    for perm in user_repo_perms_from_users_groups:
-        bump_permission(RK,
-            perm.repository.repo_name,
-            perm.permission.permission_name)
-
-    # user permissions for repositories
-    user_repo_perms = Permission.get_default_perms(user_id)
-    for perm in user_repo_perms:
-        bump_permission(RK,
-            perm.repository.repo_name,
-            perm.permission.permission_name)
-
-    #======================================================================
-    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
-    #======================================================================
-    #======================================================================
-    # check if user is part of user groups for this repository groups and
-    # fill in his permission from it.
-    #======================================================================
-    # user group for repo groups permissions
-    user_repo_group_perms_from_users_groups = \
-     Session().query(UserGroupRepoGroupToPerm) \
-     .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
-            UserGroup.users_group_id)) \
-     .filter(UserGroup.users_group_active == True) \
-     .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
-            == UserGroupMember.users_group_id)) \
-     .filter(UserGroupMember.user_id == user_id) \
-     .options(joinedload(UserGroupRepoGroupToPerm.permission)) \
-     .all()
-
-    for perm in user_repo_group_perms_from_users_groups:
-        bump_permission(GK,
-            perm.group.group_name,
-            perm.permission.permission_name)
-
-    # user explicit permissions for repository groups
-    user_repo_groups_perms = Permission.get_default_group_perms(user_id)
-    for perm in user_repo_groups_perms:
-        bump_permission(GK,
-            perm.group.group_name,
-            perm.permission.permission_name)
-
-    #======================================================================
-    # !! PERMISSIONS FOR USER GROUPS !!
-    #======================================================================
-    # user group for user group permissions
-    user_group_user_groups_perms = \
-     Session().query(UserGroupUserGroupToPerm) \
-     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
-            == UserGroup.users_group_id)) \
-     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
-            == UserGroupMember.users_group_id)) \
-     .filter(UserGroupMember.user_id == user_id) \
-     .join((UserGroup, UserGroupMember.users_group_id ==
-            UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
-     .filter(UserGroup.users_group_active == True) \
-     .options(joinedload(UserGroupUserGroupToPerm.permission)) \
-     .all()
-
-    for perm in user_group_user_groups_perms:
-        bump_permission(UK,
-            perm.target_user_group.users_group_name,
-            perm.permission.permission_name)
-
-    # user explicit permission for user groups
-    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
-    for perm in user_user_groups_perms:
-        bump_permission(UK,
-            perm.user_group.users_group_name,
-            perm.permission.permission_name)
-
-    return permissions
-
+def bump_permission(permissions, key, new_perm):
+    """Add a new permission for key to permissions.
+    Assuming the two permissions are of a comparable kind, keep the new
+    permission if it has a higher weight; otherwise keep the old one.
+    """
+    cur_perm = permissions[key]
+    new_perm_val = PERM_WEIGHTS[new_perm]
+    cur_perm_val = PERM_WEIGHTS[cur_perm]
+    if new_perm_val > cur_perm_val:
+        permissions[key] = new_perm
 
 class AuthUser(object):
     """
@@ -430,16 +146,206 @@
         log.debug('Auth User is now %s', self)
 
     @LazyProperty
+    def global_permissions(self):
+        log.debug('Getting global permissions for %s', self)
+
+        if self.is_admin:
+            return set(['hg.admin'])
+
+        global_permissions = set()
+
+        # default global permissions from the default user
+        default_global_perms = db.UserToPerm.query() \
+            .filter(db.UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \
+            .options(joinedload(db.UserToPerm.permission))
+        for perm in default_global_perms:
+            global_permissions.add(perm.permission.permission_name)
+
+        # user group global permissions
+        user_perms_from_users_groups = meta.Session().query(db.UserGroupToPerm) \
+            .options(joinedload(db.UserGroupToPerm.permission)) \
+            .join((db.UserGroupMember, db.UserGroupToPerm.users_group_id ==
+                   db.UserGroupMember.users_group_id)) \
+            .filter(db.UserGroupMember.user_id == self.user_id) \
+            .join((db.UserGroup, db.UserGroupMember.users_group_id ==
+                   db.UserGroup.users_group_id)) \
+            .filter(db.UserGroup.users_group_active == True) \
+            .order_by(db.UserGroupToPerm.users_group_id) \
+            .all()
+        # need to group here by groups since user can be in more than
+        # one group
+        _grouped = [[x, list(y)] for x, y in
+                    itertools.groupby(user_perms_from_users_groups,
+                                      lambda x:x.users_group)]
+        for gr, perms in _grouped:
+            for perm in perms:
+                global_permissions.add(perm.permission.permission_name)
+
+        # user specific global permissions
+        user_perms = meta.Session().query(db.UserToPerm) \
+                .options(joinedload(db.UserToPerm.permission)) \
+                .filter(db.UserToPerm.user_id == self.user_id).all()
+        for perm in user_perms:
+            global_permissions.add(perm.permission.permission_name)
+
+        # for each kind of global permission, only keep the one with the highest weight
+        kind_max_perm = {}
+        for perm in sorted(global_permissions, key=lambda n: PERM_WEIGHTS.get(n, -1)):
+            kind = perm.rsplit('.', 1)[0]
+            kind_max_perm[kind] = perm
+        return set(kind_max_perm.values())
+
+    @LazyProperty
+    def repository_permissions(self):
+        log.debug('Getting repository permissions for %s', self)
+        repository_permissions = {}
+        default_repo_perms = db.Permission.get_default_perms(kallithea.DEFAULT_USER_ID)
+
+        if self.is_admin:
+            for perm in default_repo_perms:
+                r_k = perm.repository.repo_name
+                p = 'repository.admin'
+                repository_permissions[r_k] = p
+
+        else:
+            # defaults for repositories from default user
+            for perm in default_repo_perms:
+                r_k = perm.repository.repo_name
+                if perm.repository.owner_id == self.user_id:
+                    p = 'repository.admin'
+                elif perm.repository.private:
+                    p = 'repository.none'
+                else:
+                    p = perm.permission.permission_name
+                repository_permissions[r_k] = p
+
+            # user group repository permissions
+            user_repo_perms_from_users_groups = \
+             meta.Session().query(db.UserGroupRepoToPerm) \
+                .join((db.UserGroup, db.UserGroupRepoToPerm.users_group_id ==
+                       db.UserGroup.users_group_id)) \
+                .filter(db.UserGroup.users_group_active == True) \
+                .join((db.UserGroupMember, db.UserGroupRepoToPerm.users_group_id ==
+                       db.UserGroupMember.users_group_id)) \
+                .filter(db.UserGroupMember.user_id == self.user_id) \
+                .options(joinedload(db.UserGroupRepoToPerm.repository)) \
+                .options(joinedload(db.UserGroupRepoToPerm.permission)) \
+                .all()
+            for perm in user_repo_perms_from_users_groups:
+                bump_permission(repository_permissions,
+                    perm.repository.repo_name,
+                    perm.permission.permission_name)
+
+            # user permissions for repositories
+            user_repo_perms = db.Permission.get_default_perms(self.user_id)
+            for perm in user_repo_perms:
+                bump_permission(repository_permissions,
+                    perm.repository.repo_name,
+                    perm.permission.permission_name)
+
+        return repository_permissions
+
+    @LazyProperty
+    def repository_group_permissions(self):
+        log.debug('Getting repository group permissions for %s', self)
+        repository_group_permissions = {}
+        default_repo_groups_perms = db.Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID)
+
+        if self.is_admin:
+            for perm in default_repo_groups_perms:
+                rg_k = perm.group.group_name
+                p = 'group.admin'
+                repository_group_permissions[rg_k] = p
+
+        else:
+            # defaults for repository groups taken from default user permission
+            # on given group
+            for perm in default_repo_groups_perms:
+                rg_k = perm.group.group_name
+                p = perm.permission.permission_name
+                repository_group_permissions[rg_k] = p
+
+            # user group for repo groups permissions
+            user_repo_group_perms_from_users_groups = \
+                meta.Session().query(db.UserGroupRepoGroupToPerm) \
+                .join((db.UserGroup, db.UserGroupRepoGroupToPerm.users_group_id ==
+                       db.UserGroup.users_group_id)) \
+                .filter(db.UserGroup.users_group_active == True) \
+                .join((db.UserGroupMember, db.UserGroupRepoGroupToPerm.users_group_id
+                       == db.UserGroupMember.users_group_id)) \
+                .filter(db.UserGroupMember.user_id == self.user_id) \
+                .options(joinedload(db.UserGroupRepoGroupToPerm.permission)) \
+                .all()
+            for perm in user_repo_group_perms_from_users_groups:
+                bump_permission(repository_group_permissions,
+                    perm.group.group_name,
+                    perm.permission.permission_name)
+
+            # user explicit permissions for repository groups
+            user_repo_groups_perms = db.Permission.get_default_group_perms(self.user_id)
+            for perm in user_repo_groups_perms:
+                bump_permission(repository_group_permissions,
+                    perm.group.group_name,
+                    perm.permission.permission_name)
+
+        return repository_group_permissions
+
+    @LazyProperty
+    def user_group_permissions(self):
+        log.debug('Getting user group permissions for %s', self)
+        user_group_permissions = {}
+        default_user_group_perms = db.Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID)
+
+        if self.is_admin:
+            for perm in default_user_group_perms:
+                u_k = perm.user_group.users_group_name
+                p = 'usergroup.admin'
+                user_group_permissions[u_k] = p
+
+        else:
+            # defaults for user groups taken from default user permission
+            # on given user group
+            for perm in default_user_group_perms:
+                u_k = perm.user_group.users_group_name
+                p = perm.permission.permission_name
+                user_group_permissions[u_k] = p
+
+            # user group for user group permissions
+            user_group_user_groups_perms = \
+                meta.Session().query(db.UserGroupUserGroupToPerm) \
+                .join((db.UserGroup, db.UserGroupUserGroupToPerm.target_user_group_id
+                       == db.UserGroup.users_group_id)) \
+                .join((db.UserGroupMember, db.UserGroupUserGroupToPerm.user_group_id
+                       == db.UserGroupMember.users_group_id)) \
+                .filter(db.UserGroupMember.user_id == self.user_id) \
+                .join((db.UserGroup, db.UserGroupMember.users_group_id ==
+                       db.UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
+                .filter(db.UserGroup.users_group_active == True) \
+                .options(joinedload(db.UserGroupUserGroupToPerm.permission)) \
+                .all()
+            for perm in user_group_user_groups_perms:
+                bump_permission(user_group_permissions,
+                    perm.target_user_group.users_group_name,
+                    perm.permission.permission_name)
+
+            # user explicit permission for user groups
+            user_user_groups_perms = db.Permission.get_default_user_group_perms(self.user_id)
+            for perm in user_user_groups_perms:
+                bump_permission(user_group_permissions,
+                    perm.user_group.users_group_name,
+                    perm.permission.permission_name)
+
+        return user_group_permissions
+
+    @LazyProperty
     def permissions(self):
-        """
-        Fills user permission attribute with permissions taken from database
-        works for permissions given for repositories, and for permissions that
-        are granted to groups
-
-        :param user: `AuthUser` instance
-        """
-        log.debug('Getting PERMISSION tree for %s', self)
-        return _cached_perms_data(self.user_id, self.is_admin)
+        """dict with all 4 kinds of permissions - mainly for backwards compatibility"""
+        return {
+            'global': self.global_permissions,
+            'repositories': self.repository_permissions,
+            'repositories_groups': self.repository_group_permissions,
+            'user_groups': self.user_group_permissions,
+        }
 
     def has_repository_permission_level(self, repo_name, level, purpose=None):
         required_perms = {
@@ -447,7 +353,7 @@
             'write': ['repository.write', 'repository.admin'],
             'admin': ['repository.admin'],
         }[level]
-        actual_perm = self.permissions['repositories'].get(repo_name)
+        actual_perm = self.repository_permissions.get(repo_name)
         ok = actual_perm in required_perms
         log.debug('Checking if user %r can %r repo %r (%s): %s (has %r)',
             self.username, level, repo_name, purpose, ok, actual_perm)
@@ -459,7 +365,7 @@
             'write': ['group.write', 'group.admin'],
             'admin': ['group.admin'],
         }[level]
-        actual_perm = self.permissions['repositories_groups'].get(repo_group_name)
+        actual_perm = self.repository_group_permissions.get(repo_group_name)
         ok = actual_perm in required_perms
         log.debug('Checking if user %r can %r repo group %r (%s): %s (has %r)',
             self.username, level, repo_group_name, purpose, ok, actual_perm)
@@ -471,7 +377,7 @@
             'write': ['usergroup.write', 'usergroup.admin'],
             'admin': ['usergroup.admin'],
         }[level]
-        actual_perm = self.permissions['user_groups'].get(user_group_name)
+        actual_perm = self.user_group_permissions.get(user_group_name)
         ok = actual_perm in required_perms
         log.debug('Checking if user %r can %r user group %r (%s): %s (has %r)',
             self.username, level, user_group_name, purpose, ok, actual_perm)
@@ -483,7 +389,7 @@
 
     def _get_api_keys(self):
         api_keys = [self.api_key]
-        for api_key in UserApiKeys.query() \
+        for api_key in db.UserApiKeys.query() \
                 .filter_by(user_id=self.user_id, is_expired=False):
             api_keys.append(api_key.api_key)
 
@@ -498,7 +404,7 @@
         """
         Returns list of repositories you're an admin of
         """
-        return [x[0] for x in self.permissions['repositories'].items()
+        return [x[0] for x in self.repository_permissions.items()
                 if x[1] == 'repository.admin']
 
     @property
@@ -506,7 +412,7 @@
         """
         Returns list of repository groups you're an admin of
         """
-        return [x[0] for x in self.permissions['repositories_groups'].items()
+        return [x[0] for x in self.repository_group_permissions.items()
                 if x[1] == 'group.admin']
 
     @property
@@ -514,7 +420,7 @@
         """
         Returns list of user groups you're an admin of
         """
-        return [x[0] for x in self.permissions['user_groups'].items()
+        return [x[0] for x in self.user_group_permissions.items()
                 if x[1] == 'usergroup.admin']
 
     def __repr__(self):
@@ -542,7 +448,7 @@
     def get_allowed_ips(cls, user_id):
         _set = set()
 
-        default_ips = UserIpMap.query().filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID)
+        default_ips = db.UserIpMap.query().filter(db.UserIpMap.user_id == kallithea.DEFAULT_USER_ID)
         for ip in default_ips:
             try:
                 _set.add(ip.ip_addr)
@@ -551,7 +457,7 @@
                 # deleted objects here, we just skip them
                 pass
 
-        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
+        user_ips = db.UserIpMap.query().filter(db.UserIpMap.user_id == user_id)
         for ip in user_ips:
             try:
                 _set.add(ip.ip_addr)
@@ -561,6 +467,16 @@
                 pass
         return _set or set(['0.0.0.0/0', '::/0'])
 
+    def get_all_user_repos(self):
+        """
+        Get all repositories to which the user has at least read access.
+        """
+        repos = [repo_name
+            for repo_name, perm in self.repository_permissions.items()
+            if perm in ['repository.read', 'repository.write', 'repository.admin']
+            ]
+        return db.Repository.query().filter(db.Repository.repo_name.in_(repos))
+
 
 #==============================================================================
 # CHECK DECORATORS
@@ -570,9 +486,8 @@
     """Return an exception that must be raised. It will redirect to the login
     page which will redirect back to the current URL after authentication.
     The optional message will be shown in a flash message."""
-    from kallithea.lib import helpers as h
     if message:
-        h.flash(message, category='warning')
+        webutils.flash(message, category='warning')
     p = request.path_qs
     log.debug('Redirecting to login page, origin: %s', p)
     return HTTPFound(location=url('login_home', came_from=p))
@@ -673,8 +588,7 @@
     """
 
     def check_permissions(self, user):
-        global_permissions = user.permissions['global'] # usually very short
-        return any(p in global_permissions for p in self.required_perms)
+        return any(p in user.global_permissions for p in self.required_perms)
 
 
 class _PermDecorator(_PermsDecorator):
@@ -740,8 +654,7 @@
 class HasPermissionAny(_PermsFunction):
 
     def __call__(self, purpose=None):
-        global_permissions = request.authuser.permissions['global'] # usually very short
-        ok = any(p in global_permissions for p in self.required_perms)
+        ok = any(p in request.authuser.global_permissions for p in self.required_perms)
 
         log.debug('Check %s for global %s (%s): %s',
             request.authuser.username, self.required_perms, purpose, ok)
@@ -784,7 +697,7 @@
 
     def __call__(self, authuser, repo_name, purpose=None):
         try:
-            ok = authuser.permissions['repositories'][repo_name] in self.required_perms
+            ok = authuser.repository_permissions[repo_name] in self.required_perms
         except KeyError:
             ok = False
 
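For illustration, a minimal, self-contained sketch of the highest-weight-wins resolution performed by the bump_permission() helper added near the top of this file's diff; the PERM_WEIGHTS values and repository name below are assumed for the example, the real mapping is defined elsewhere in kallithea/lib/auth.py:

    # Illustration only: highest-weight-wins resolution as performed by bump_permission().
    # The weight values below are assumed for this example.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def bump_permission(permissions, key, new_perm):
        # Replace the current permission only if the new one has a higher weight.
        if PERM_WEIGHTS[new_perm] > PERM_WEIGHTS[permissions[key]]:
            permissions[key] = new_perm

    perms = {'myrepo': 'repository.read'}                  # default from the default user
    bump_permission(perms, 'myrepo', 'repository.write')   # e.g. granted via a user group
    bump_permission(perms, 'myrepo', 'repository.none')    # lower weight: ignored
    assert perms == {'myrepo': 'repository.write'}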
--- a/kallithea/lib/auth_modules/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/auth_modules/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -18,12 +18,12 @@
 import importlib
 import logging
 import traceback
+from inspect import isfunction
 
-from kallithea.lib.auth import AuthUser, PasswordGenerator
+from kallithea.lib.auth import AuthUser
 from kallithea.lib.compat import hybrid_property
-from kallithea.lib.utils2 import str2bool
-from kallithea.model.db import Setting, User
-from kallithea.model.meta import Session
+from kallithea.lib.utils2 import PasswordGenerator, asbool
+from kallithea.model import db, meta, validators
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
 
@@ -38,7 +38,6 @@
         self.kwargs = kwargs
 
     def __call__(self, *args, **kwargs):
-        from inspect import isfunction
         formencode_obj = self.formencode_obj
         if isfunction(formencode_obj):
             # case we wrap validators into functions
@@ -69,8 +68,7 @@
                 self.validator_name = name
 
             def __call__(self, *args, **kwargs):
-                from kallithea.model import validators as v
-                obj = getattr(v, self.validator_name)
+                obj = getattr(validators, self.validator_name)
                 #log.debug('Initializing lazy formencode object: %s', obj)
                 return LazyFormencode(obj, *args, **kwargs)
 
@@ -135,7 +133,7 @@
         log.debug('Trying to fetch user `%s` from Kallithea database',
                   username)
         if username:
-            user = User.get_by_username_or_email(username)
+            user = db.User.get_by_username_or_email(username)
         else:
             log.debug('provided username:`%s` is empty skipping...', username)
         return user
@@ -182,8 +180,8 @@
         OVERRIDING THIS METHOD WILL CAUSE YOUR PLUGIN TO FAIL.
         """
 
-        rcsettings = self.settings()
-        rcsettings.insert(0, {
+        settings = self.settings()
+        settings.insert(0, {
             "name": "enabled",
             "validator": self.validators.StringBoolean(if_missing=False),
             "type": "bool",
@@ -191,7 +189,7 @@
             "formname": "Enabled"
             }
         )
-        return rcsettings
+        return settings
 
     def auth(self, userobj, username, passwd, settings, **kwargs):
         """
@@ -240,7 +238,7 @@
             userobj, username, passwd, settings, **kwargs)
         if user_data is not None:
             if userobj is None: # external authentication of unknown user that will be created soon
-                def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global']
+                def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions
                 active = 'hg.extern_activate.auto' in def_user_perms
             else:
                 active = userobj.active
@@ -267,7 +265,7 @@
             # created from plugins. We store this info in _group_data JSON field
             groups = user_data['groups'] or []
             UserGroupModel().enforce_groups(user, groups, self.name)
-            Session().commit()
+            meta.Session().commit()
         return user_data
 
 
@@ -316,7 +314,7 @@
 def get_auth_plugins():
     """Return a list of instances of plugins that are available and enabled"""
     auth_plugins = []
-    for plugin_name in Setting.get_by_name("auth_plugins").app_settings_value:
+    for plugin_name in db.Setting.get_by_name("auth_plugins").app_settings_value:
         try:
             plugin = loadplugin(plugin_name)
         except Exception:
@@ -346,11 +344,11 @@
         plugin_settings = {}
         for v in plugin.plugin_settings():
             conf_key = "auth_%s_%s" % (plugin_name, v["name"])
-            setting = Setting.get_by_name(conf_key)
+            setting = db.Setting.get_by_name(conf_key)
             plugin_settings[v["name"]] = setting.app_settings_value if setting else None
         log.debug('Settings for auth plugin %s: %s', plugin_name, plugin_settings)
 
-        if not str2bool(plugin_settings["enabled"]):
+        if not asbool(plugin_settings["enabled"]):
             log.info("Authentication plugin %s is disabled, skipping for %s",
                      module, username)
             continue
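For illustration, a minimal sketch of the auth_<plugin>_<setting> key scheme and the 'enabled' gate used by the plugin loader above; the in-memory dict stands in for db.Setting, and the asbool() stand-in is a simplified assumption:

    # Sketch: an in-memory dict stands in for db.Setting; asbool() is a simplified stand-in.
    settings_store = {
        'auth_internal_enabled': 'True',
        'auth_ldap_enabled': 'False',
    }

    def asbool(value):
        # simplified stand-in for kallithea.lib.utils2.asbool
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', '1')

    def plugin_enabled(plugin_name):
        conf_key = "auth_%s_%s" % (plugin_name, "enabled")
        return asbool(settings_store.get(conf_key))

    assert plugin_enabled('internal') is True
    assert plugin_enabled('ldap') is False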
--- a/kallithea/lib/auth_modules/auth_container.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/auth_modules/auth_container.py	Thu May 27 21:27:37 2021 +0200
@@ -29,8 +29,8 @@
 
 from kallithea.lib import auth_modules
 from kallithea.lib.compat import hybrid_property
-from kallithea.lib.utils2 import str2bool
-from kallithea.model.db import Setting
+from kallithea.lib.utils2 import asbool
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -131,7 +131,7 @@
             username = environ.get(header)
             log.debug('extracted %s:%s', header, username)
 
-        if username and str2bool(settings.get('clean_username')):
+        if username and asbool(settings.get('clean_username')):
             log.debug('Received username %s from container', username)
             username = self._clean_username(username)
             log.debug('New cleanup user is: %s', username)
@@ -212,10 +212,10 @@
 
     def get_managed_fields(self):
         fields = ['username', 'password']
-        if(Setting.get_by_name('auth_container_email_header').app_settings_value):
+        if(db.Setting.get_by_name('auth_container_email_header').app_settings_value):
             fields.append('email')
-        if(Setting.get_by_name('auth_container_firstname_header').app_settings_value):
+        if(db.Setting.get_by_name('auth_container_firstname_header').app_settings_value):
             fields.append('firstname')
-        if(Setting.get_by_name('auth_container_lastname_header').app_settings_value):
+        if(db.Setting.get_by_name('auth_container_lastname_header').app_settings_value):
             fields.append('lastname')
         return fields
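For illustration, a sketch of the get_managed_fields() logic above, with a hypothetical in-memory dict standing in for the db.Setting lookups:

    # Hypothetical header settings standing in for db.Setting.get_by_name(...).app_settings_value.
    container_settings = {
        'auth_container_email_header': 'X-Forwarded-Email',
        'auth_container_firstname_header': '',
        'auth_container_lastname_header': '',
    }

    def get_managed_fields(settings):
        fields = ['username', 'password']
        if settings.get('auth_container_email_header'):
            fields.append('email')
        if settings.get('auth_container_firstname_header'):
            fields.append('firstname')
        if settings.get('auth_container_lastname_header'):
            fields.append('lastname')
        return fields

    assert get_managed_fields(container_settings) == ['username', 'password', 'email']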
--- a/kallithea/lib/auth_modules/auth_internal.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/auth_modules/auth_internal.py	Thu May 27 21:27:37 2021 +0200
@@ -28,7 +28,7 @@
 
 import logging
 
-from kallithea.lib import auth_modules
+from kallithea.lib import auth_modules, utils2
 from kallithea.lib.compat import hybrid_property
 
 
@@ -78,8 +78,7 @@
         }
         log.debug('user data: %s', user_data)
 
-        from kallithea.lib import auth
-        password_match = auth.check_password(password, userobj.password)
+        password_match = utils2.check_password(password, userobj.password)
         if userobj.is_default_user:
             log.info('user %s authenticated correctly as anonymous user',
                      username)
--- a/kallithea/lib/base.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,649 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-"""
-kallithea.lib.base
-~~~~~~~~~~~~~~~~~~
-
-The base Controller API
-Provides the BaseController class for subclassing. And usage in different
-controllers
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Oct 06, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import base64
-import datetime
-import logging
-import traceback
-import warnings
-
-import decorator
-import paste.auth.basic
-import paste.httpexceptions
-import paste.httpheaders
-import webob.exc
-from tg import TGController, config, render_template, request, response, session
-from tg import tmpl_context as c
-from tg.i18n import ugettext as _
-
-from kallithea import BACKENDS, __version__
-from kallithea.config.routing import url
-from kallithea.lib import auth_modules, ext_json
-from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils import get_repo_slug, is_valid_repo
-from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_str, set_hook_environment, str2bool
-from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
-from kallithea.model import meta
-from kallithea.model.db import PullRequest, Repository, Setting, User
-from kallithea.model.scm import ScmModel
-
-
-log = logging.getLogger(__name__)
-
-
-def render(template_path):
-    return render_template({'url': url}, 'mako', template_path)
-
-
-def _filter_proxy(ip):
-    """
-    HEADERS can have multiple ips inside the left-most being the original
-    client, and each successive proxy that passed the request adding the IP
-    address where it received the request from.
-
-    :param ip:
-    """
-    if ',' in ip:
-        _ips = ip.split(',')
-        _first_ip = _ips[0].strip()
-        log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
-        return _first_ip
-    return ip
-
-
-def _get_ip_addr(environ):
-    proxy_key = 'HTTP_X_REAL_IP'
-    proxy_key2 = 'HTTP_X_FORWARDED_FOR'
-    def_key = 'REMOTE_ADDR'
-
-    ip = environ.get(proxy_key)
-    if ip:
-        return _filter_proxy(ip)
-
-    ip = environ.get(proxy_key2)
-    if ip:
-        return _filter_proxy(ip)
-
-    ip = environ.get(def_key, '0.0.0.0')
-    return _filter_proxy(ip)
-
-
-def get_path_info(environ):
-    """Return PATH_INFO from environ ... using tg.original_request if available.
-
-    In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded
-    bytes. The code points are guaranteed to only use the lower 8 bit bits, and
-    encoding the string with the 1:1 encoding latin1 will give the
-    corresponding byte string ... which then can be decoded to proper unicode.
-    """
-    org_req = environ.get('tg.original_request')
-    if org_req is not None:
-        environ = org_req.environ
-    return safe_str(environ['PATH_INFO'].encode('latin1'))
-
-
-def log_in_user(user, remember, is_external_auth, ip_addr):
-    """
-    Log a `User` in and update session and cookies. If `remember` is True,
-    the session cookie is set to expire in a year; otherwise, it expires at
-    the end of the browser session.
-
-    Returns populated `AuthUser` object.
-    """
-    # It should not be possible to explicitly log in as the default user.
-    assert not user.is_default_user, user
-
-    auth_user = AuthUser.make(dbuser=user, is_external_auth=is_external_auth, ip_addr=ip_addr)
-    if auth_user is None:
-        return None
-
-    user.update_lastlogin()
-    meta.Session().commit()
-
-    # Start new session to prevent session fixation attacks.
-    session.invalidate()
-    session['authuser'] = cookie = auth_user.to_cookie()
-
-    # If they want to be remembered, update the cookie.
-    # NOTE: Assumes that beaker defaults to browser session cookie.
-    if remember:
-        t = datetime.datetime.now() + datetime.timedelta(days=365)
-        session._set_cookie_expires(t)
-
-    session.save()
-
-    log.info('user %s is now authenticated and stored in '
-             'session, session attrs %s', user.username, cookie)
-
-    # dumps session attrs back to cookie
-    session._update_cookie_out()
-
-    return auth_user
-
-
-class BasicAuth(paste.auth.basic.AuthBasicAuthenticator):
-
-    def __init__(self, realm, authfunc, auth_http_code=None):
-        self.realm = realm
-        self.authfunc = authfunc
-        self._rc_auth_http_code = auth_http_code
-
-    def build_authentication(self, environ):
-        head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
-        # Consume the whole body before sending a response
-        try:
-            request_body_size = int(environ.get('CONTENT_LENGTH', 0))
-        except (ValueError):
-            request_body_size = 0
-        environ['wsgi.input'].read(request_body_size)
-        if self._rc_auth_http_code and self._rc_auth_http_code == '403':
-            # return 403 if alternative http return code is specified in
-            # Kallithea config
-            return paste.httpexceptions.HTTPForbidden(headers=head)
-        return paste.httpexceptions.HTTPUnauthorized(headers=head)
-
-    def authenticate(self, environ):
-        authorization = paste.httpheaders.AUTHORIZATION(environ)
-        if not authorization:
-            return self.build_authentication(environ)
-        (authmeth, auth) = authorization.split(' ', 1)
-        if 'basic' != authmeth.lower():
-            return self.build_authentication(environ)
-        auth = safe_str(base64.b64decode(auth.strip()))
-        _parts = auth.split(':', 1)
-        if len(_parts) == 2:
-            username, password = _parts
-            if self.authfunc(username, password, environ) is not None:
-                return username
-        return self.build_authentication(environ)
-
-    __call__ = authenticate
-
-
-class BaseVCSController(object):
-    """Base controller for handling Mercurial/Git protocol requests
-    (coming from a VCS client, and not a browser).
-    """
-
-    scm_alias = None # 'hg' / 'git'
-
-    def __init__(self, application, config):
-        self.application = application
-        self.config = config
-        # base path of repo locations
-        self.basepath = self.config['base_path']
-        # authenticate this VCS request using the authentication modules
-        self.authenticate = BasicAuth('', auth_modules.authenticate,
-                                      config.get('auth_ret_code'))
-
-    @classmethod
-    def parse_request(cls, environ):
-        """If request is parsed as a request for this VCS, return a namespace with the parsed request.
-        If the request is unknown, return None.
-        """
-        raise NotImplementedError()
-
-    def _authorize(self, environ, action, repo_name, ip_addr):
-        """Authenticate and authorize user.
-
-        Since we're dealing with a VCS client and not a browser, we only
-        support HTTP basic authentication, either directly via raw header
-        inspection, or by using container authentication to delegate the
-        authentication to the web server.
-
-        Returns (user, None) on successful authentication and authorization.
-        Returns (None, wsgi_app) to send the wsgi_app response to the client.
-        """
-        # Use anonymous access if allowed for action on repo.
-        default_user = User.get_default_user()
-        default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
-        if default_authuser is None:
-            log.debug('No anonymous access at all') # move on to proper user auth
-        else:
-            if self._check_permission(action, default_authuser, repo_name):
-                return default_authuser, None
-            log.debug('Not authorized to access this repository as anonymous user')
-
-        username = None
-        #==============================================================
-        # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
-        # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
-        #==============================================================
-
-        # try to auth based on environ, container auth methods
-        log.debug('Running PRE-AUTH for container based authentication')
-        pre_auth = auth_modules.authenticate('', '', environ)
-        if pre_auth is not None and pre_auth.get('username'):
-            username = pre_auth['username']
-        log.debug('PRE-AUTH got %s as username', username)
-
-        # If not authenticated by the container, running basic auth
-        if not username:
-            self.authenticate.realm = self.config['realm']
-            result = self.authenticate(environ)
-            if isinstance(result, str):
-                paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
-                paste.httpheaders.REMOTE_USER.update(environ, result)
-                username = result
-            else:
-                return None, result.wsgi_application
-
-        #==============================================================
-        # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
-        #==============================================================
-        try:
-            user = User.get_by_username_or_email(username)
-        except Exception:
-            log.error(traceback.format_exc())
-            return None, webob.exc.HTTPInternalServerError()
-
-        authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr)
-        if authuser is None:
-            return None, webob.exc.HTTPForbidden()
-        if not self._check_permission(action, authuser, repo_name):
-            return None, webob.exc.HTTPForbidden()
-
-        return user, None
-
-    def _handle_request(self, environ, start_response):
-        raise NotImplementedError()
-
-    def _check_permission(self, action, authuser, repo_name):
-        """
-        :param action: 'push' or 'pull'
-        :param user: `AuthUser` instance
-        :param repo_name: repository name
-        """
-        if action == 'push':
-            if not HasPermissionAnyMiddleware('repository.write',
-                                              'repository.admin')(authuser,
-                                                                  repo_name):
-                return False
-
-        elif action == 'pull':
-            #any other action need at least read permission
-            if not HasPermissionAnyMiddleware('repository.read',
-                                              'repository.write',
-                                              'repository.admin')(authuser,
-                                                                  repo_name):
-                return False
-
-        else:
-            assert False, action
-
-        return True
-
-    def _get_ip_addr(self, environ):
-        return _get_ip_addr(environ)
-
-    def __call__(self, environ, start_response):
-        try:
-            # try parsing a request for this VCS - if it fails, call the wrapped app
-            parsed_request = self.parse_request(environ)
-            if parsed_request is None:
-                return self.application(environ, start_response)
-
-            # skip passing error to error controller
-            environ['pylons.status_code_redirect'] = True
-
-            # quick check if repo exists...
-            if not is_valid_repo(parsed_request.repo_name, self.basepath, self.scm_alias):
-                raise webob.exc.HTTPNotFound()
-
-            if parsed_request.action is None:
-                # Note: the client doesn't get the helpful error message
-                raise webob.exc.HTTPBadRequest('Unable to detect pull/push action for %r! Are you using a nonstandard command or client?' % parsed_request.repo_name)
-
-            #======================================================================
-            # CHECK PERMISSIONS
-            #======================================================================
-            ip_addr = self._get_ip_addr(environ)
-            user, response_app = self._authorize(environ, parsed_request.action, parsed_request.repo_name, ip_addr)
-            if response_app is not None:
-                return response_app(environ, start_response)
-
-            #======================================================================
-            # REQUEST HANDLING
-            #======================================================================
-            set_hook_environment(user.username, ip_addr,
-                parsed_request.repo_name, self.scm_alias, parsed_request.action)
-
-            try:
-                log.info('%s action on %s repo "%s" by "%s" from %s',
-                         parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
-                app = self._make_app(parsed_request)
-                return app(environ, start_response)
-            except Exception:
-                log.error(traceback.format_exc())
-                raise webob.exc.HTTPInternalServerError()
-
-        except webob.exc.HTTPException as e:
-            return e(environ, start_response)
-
-
-class BaseController(TGController):
-
-    def _before(self, *args, **kwargs):
-        """
-        _before is called before controller methods and after __call__
-        """
-        if request.needs_csrf_check:
-            # CSRF protection: Whenever a request has ambient authority (whether
-            # through a session cookie or its origin IP address), it must include
-            # the correct token, unless the HTTP method is GET or HEAD (and thus
-            # guaranteed to be side effect free. In practice, the only situation
-            # where we allow side effects without ambient authority is when the
-            # authority comes from an API key; and that is handled above.
-            from kallithea.lib import helpers as h
-            token = request.POST.get(h.session_csrf_secret_name)
-            if not token or token != h.session_csrf_secret_token():
-                log.error('CSRF check failed')
-                raise webob.exc.HTTPForbidden()
-
-        c.kallithea_version = __version__
-        rc_config = Setting.get_app_settings()
-
-        # Visual options
-        c.visual = AttributeDict({})
-
-        ## DB stored
-        c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon'))
-        c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon'))
-        c.visual.stylify_metalabels = str2bool(rc_config.get('stylify_metalabels'))
-        c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100))
-        c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100))
-        c.visual.repository_fields = str2bool(rc_config.get('repository_fields'))
-        c.visual.show_version = str2bool(rc_config.get('show_version'))
-        c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar'))
-        c.visual.gravatar_url = rc_config.get('gravatar_url')
-
-        c.ga_code = rc_config.get('ga_code')
-        # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
-        if c.ga_code and '<' not in c.ga_code:
-            c.ga_code = '''<script type="text/javascript">
-                var _gaq = _gaq || [];
-                _gaq.push(['_setAccount', '%s']);
-                _gaq.push(['_trackPageview']);
-
-                (function() {
-                    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
-                    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
-                    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
-                    })();
-            </script>''' % c.ga_code
-        c.site_name = rc_config.get('title')
-        c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI
-        c.clone_ssh_tmpl = rc_config.get('clone_ssh_tmpl') or Repository.DEFAULT_CLONE_SSH
-
-        ## INI stored
-        c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True))
-        c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True))
-        c.ssh_enabled = str2bool(config.get('ssh_enabled', False))
-
-        c.instance_id = config.get('instance_id')
-        c.issues_url = config.get('bugtracker', url('issues_url'))
-        # END CONFIG VARS
-
-        c.repo_name = get_repo_slug(request)  # can be empty
-        c.backends = list(BACKENDS)
-
-        self.cut_off_limit = safe_int(config.get('cut_off_limit'))
-
-        c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()
-
-        self.scm_model = ScmModel()
-
-    @staticmethod
-    def _determine_auth_user(session_authuser, ip_addr):
-        """
-        Create an `AuthUser` object given the API key/bearer token
-        (if any) and the value of the authuser session cookie.
-        Returns None if no valid user is found (like not active or no access for IP).
-        """
-
-        # Authenticate by session cookie
-        # In ancient login sessions, 'authuser' may not be a dict.
-        # In that case, the user will have to log in again.
-        # v0.3 and earlier included an 'is_authenticated' key; if present,
-        # this must be True.
-        if isinstance(session_authuser, dict) and session_authuser.get('is_authenticated', True):
-            return AuthUser.from_cookie(session_authuser, ip_addr=ip_addr)
-
-        # Authenticate by auth_container plugin (if enabled)
-        if any(
-            plugin.is_container_auth
-            for plugin in auth_modules.get_auth_plugins()
-        ):
-            try:
-                user_info = auth_modules.authenticate('', '', request.environ)
-            except UserCreationError as e:
-                from kallithea.lib import helpers as h
-                h.flash(e, 'error', logf=log.error)
-            else:
-                if user_info is not None:
-                    username = user_info['username']
-                    user = User.get_by_username(username, case_insensitive=True)
-                    return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)
-
-        # User is default user (if active) or anonymous
-        default_user = User.get_default_user()
-        authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
-        if authuser is None: # fall back to anonymous
-            authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
-        return authuser
-
-    @staticmethod
-    def _basic_security_checks():
-        """Perform basic security/sanity checks before processing the request."""
-
-        # Only allow the following HTTP request methods.
-        if request.method not in ['GET', 'HEAD', 'POST']:
-            raise webob.exc.HTTPMethodNotAllowed()
-
-        # Also verify the _method override - no longer allowed.
-        if request.params.get('_method') is None:
-            pass # no override, no problem
-        else:
-            raise webob.exc.HTTPMethodNotAllowed()
-
-        # Make sure CSRF token never appears in the URL. If so, invalidate it.
-        from kallithea.lib import helpers as h
-        if h.session_csrf_secret_name in request.GET:
-            log.error('CSRF key leak detected')
-            session.pop(h.session_csrf_secret_name, None)
-            session.save()
-            h.flash(_('CSRF token leak has been detected - all form tokens have been expired'),
-                    category='error')
-
-        # WebOb already ignores request payload parameters for anything other
-        # than POST/PUT, but double-check since other Kallithea code relies on
-        # this assumption.
-        if request.method not in ['POST', 'PUT'] and request.POST:
-            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
-            raise webob.exc.HTTPBadRequest()
-
-    def __call__(self, environ, context):
-        try:
-            ip_addr = _get_ip_addr(environ)
-            self._basic_security_checks()
-
-            api_key = request.GET.get('api_key')
-            try:
-                # Request.authorization may raise ValueError on invalid input
-                type, params = request.authorization
-            except (ValueError, TypeError):
-                pass
-            else:
-                if type.lower() == 'bearer':
-                    api_key = params # bearer token is an api key too
-
-            if api_key is None:
-                authuser = self._determine_auth_user(
-                    session.get('authuser'),
-                    ip_addr=ip_addr,
-                )
-                needs_csrf_check = request.method not in ['GET', 'HEAD']
-
-            else:
-                dbuser = User.get_by_api_key(api_key)
-                if dbuser is None:
-                    log.info('No db user found for authentication with API key ****%s from %s',
-                             api_key[-4:], ip_addr)
-                authuser = AuthUser.make(dbuser=dbuser, is_external_auth=True, ip_addr=ip_addr)
-                needs_csrf_check = False # API key provides CSRF protection
-
-            if authuser is None:
-                log.info('No valid user found')
-                raise webob.exc.HTTPForbidden()
-
-            # set globals for auth user
-            request.authuser = authuser
-            request.ip_addr = ip_addr
-            request.needs_csrf_check = needs_csrf_check
-
-            log.info('IP: %s User: %s Request: %s',
-                request.ip_addr, request.authuser,
-                get_path_info(environ),
-            )
-            return super(BaseController, self).__call__(environ, context)
-        except webob.exc.HTTPException as e:
-            return e
-
-
-class BaseRepoController(BaseController):
-    """
-    Base class for controllers responsible for loading all needed data for
-    repository loaded items are
-
-    c.db_repo_scm_instance: instance of scm repository
-    c.db_repo: instance of db
-    c.repository_followers: number of followers
-    c.repository_forks: number of forks
-    c.repository_following: weather the current user is following the current repo
-    """
-
-    def _before(self, *args, **kwargs):
-        super(BaseRepoController, self)._before(*args, **kwargs)
-        if c.repo_name:  # extracted from request by base-base BaseController._before
-            _dbr = Repository.get_by_repo_name(c.repo_name)
-            if not _dbr:
-                return
-
-            log.debug('Found repository in database %s with state `%s`',
-                      _dbr, _dbr.repo_state)
-            route = getattr(request.environ.get('routes.route'), 'name', '')
-
-            # allow to delete repos that are somehow damages in filesystem
-            if route in ['delete_repo']:
-                return
-
-            if _dbr.repo_state in [Repository.STATE_PENDING]:
-                if route in ['repo_creating_home']:
-                    return
-                check_url = url('repo_creating_home', repo_name=c.repo_name)
-                raise webob.exc.HTTPFound(location=check_url)
-
-            dbr = c.db_repo = _dbr
-            c.db_repo_scm_instance = c.db_repo.scm_instance
-            if c.db_repo_scm_instance is None:
-                log.error('%s this repository is present in database but it '
-                          'cannot be created as an scm instance', c.repo_name)
-                from kallithea.lib import helpers as h
-                h.flash(_('Repository not found in the filesystem'),
-                        category='error')
-                raise webob.exc.HTTPNotFound()
-
-            # some globals counter for menu
-            c.repository_followers = self.scm_model.get_followers(dbr)
-            c.repository_forks = self.scm_model.get_forks(dbr)
-            c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
-            c.repository_following = self.scm_model.is_following_repo(
-                                    c.repo_name, request.authuser.user_id)
-
-    @staticmethod
-    def _get_ref_rev(repo, ref_type, ref_name, returnempty=False):
-        """
-        Safe way to get changeset. If error occurs show error.
-        """
-        from kallithea.lib import helpers as h
-        try:
-            return repo.scm_instance.get_ref_revision(ref_type, ref_name)
-        except EmptyRepositoryError as e:
-            if returnempty:
-                return repo.scm_instance.EMPTY_CHANGESET
-            h.flash(_('There are no changesets yet'), category='error')
-            raise webob.exc.HTTPNotFound()
-        except ChangesetDoesNotExistError as e:
-            h.flash(_('Changeset for %s %s not found in %s') %
-                              (ref_type, ref_name, repo.repo_name),
-                    category='error')
-            raise webob.exc.HTTPNotFound()
-        except RepositoryError as e:
-            log.error(traceback.format_exc())
-            h.flash(e, category='error')
-            raise webob.exc.HTTPBadRequest()
-
-
-@decorator.decorator
-def jsonify(func, *args, **kwargs):
-    """Action decorator that formats output for JSON
-
-    Given a function that will return content, this decorator will turn
-    the result into JSON, with a content-type of 'application/json' and
-    output it.
-    """
-    response.headers['Content-Type'] = 'application/json; charset=utf-8'
-    data = func(*args, **kwargs)
-    if isinstance(data, (list, tuple)):
-        # A JSON list response is syntactically valid JavaScript and can be
-        # loaded and executed as JavaScript by a malicious third-party site
-        # using <script>, which can lead to cross-site data leaks.
-        # JSON responses should therefore be scalars or objects (i.e. Python
-        # dicts), because a JSON object is a syntax error if intepreted as JS.
-        msg = "JSON responses with Array envelopes are susceptible to " \
-              "cross-site data leak attacks, see " \
-              "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
-        warnings.warn(msg, Warning, 2)
-        log.warning(msg)
-    log.debug("Returning JSON wrapped action output")
-    return ascii_bytes(ext_json.dumps(data))
-
-@decorator.decorator
-def IfSshEnabled(func, *args, **kwargs):
-    """Decorator for functions that can only be called if SSH access is enabled.
-
-    If SSH access is disabled in the configuration file, HTTPNotFound is raised.
-    """
-    if not c.ssh_enabled:
-        from kallithea.lib import helpers as h
-        h.flash(_("SSH access is disabled."), category='warning')
-        raise webob.exc.HTTPNotFound()
-    return func(*args, **kwargs)
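The removed get_path_info() docstring above describes the WSGI PATH_INFO round-trip; a standalone sketch of that latin-1/UTF-8 trick, independent of Kallithea and assuming safe_str() decodes UTF-8:

    # PATH_INFO as a WSGI server may deliver it: bytes smuggled into a str, one code
    # point per byte (the UTF-8 encoding of 'é' appears as the two chars '\xc3\xa9').
    raw_path_info = '/repos/caf\xc3\xa9'
    path = raw_path_info.encode('latin1').decode('utf-8')
    assert path == '/repos/café'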
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/celery_app.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+"""
+Kallithea wrapper of Celery
+
+The Celery configuration is in the Kallithea ini file but must be converted to an
+entirely different format before Celery can use it.
+
+We read the configuration from tg.config at module import time. This module can
+thus not be imported in global scope but must be imported on demand in function
+scope after tg.config has been initialized.
+
+To make sure that the config really has been initialized, we check one of the
+mandatory settings.
+"""
+
+import logging
+
+
+class CeleryConfig(object):
+    imports = [
+        'kallithea.lib.indexers.daemon',
+        'kallithea.model.async_tasks',
+        'kallithea.model.notification',
+        'kallithea.model.repo',
+    ]
+    task_always_eager = False
+
+list_config_names = {'imports', 'accept_content'}
+
+
+desupported = set([
+    'broker.url',
+    'celery.accept.content',
+    'celery.always.eager',
+    'celery.amqp.task.result.expires',
+    'celeryd.concurrency',
+    'celeryd.max.tasks.per.child',
+    'celery.result.backend',  # Note: the .ini template used this instead of 'celery.result_backend' in 0.6
+    'celery.result.dburi',
+    'celery.result.serialier',
+    'celery.result.serializer',
+    'celery.send.task.error.emails',
+    'celery.task_always_eager',  # still a valid configuration in celery, but not supported in Kallithea
+    'celery.task.serializer',
+])
+
+
+log = logging.getLogger(__name__)
+
+
+def make_celery_config(config):
+    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
+
+    celery_config = CeleryConfig()
+
+    for config_key, config_value in sorted(config.items()):
+        if config_key in desupported and config_value:
+            log.error('Celery configuration setting %r is no longer supported', config_key)
+        parts = config_key.split('.', 1)
+        if parts[0] == 'celery' and len(parts) == 2:  # Celery 4 config key
+            celery_key = parts[1]
+        else:
+            continue
+        if not isinstance(config_value, str):
+            continue
+        if celery_key in list_config_names:
+            celery_value = config_value.split()
+        elif config_value.isdigit():
+            celery_value = int(config_value)
+        elif config_value.lower() in ['true', 'false']:
+            celery_value = config_value.lower() == 'true'
+        else:
+            celery_value = config_value
+        setattr(celery_config, celery_key, celery_value)
+    return celery_config
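A usage sketch of make_celery_config() from the new module above, with hypothetical ini values: only keys in the 'celery.' namespace are picked up, digit strings become ints, 'true'/'false' become booleans, and keys listed in list_config_names are whitespace-split:

    from kallithea.lib.celery_app import make_celery_config

    ini_config = {
        'celery.broker_url': 'amqp://guest:guest@localhost:5672//',  # hypothetical values
        'celery.worker_concurrency': '2',
        'celery.task_ignore_result': 'true',
        'use_celery': 'true',            # not in the 'celery.' namespace: ignored here
    }
    celery_config = make_celery_config(ini_config)
    assert celery_config.broker_url == 'amqp://guest:guest@localhost:5672//'
    assert celery_config.worker_concurrency == 2
    assert celery_config.task_ignore_result is True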
--- a/kallithea/lib/celerylib/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/celerylib/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -28,61 +28,46 @@
 
 import logging
 import os
-from hashlib import md5
+from hashlib import sha1
 
 from decorator import decorator
 from tg import config
 
 import kallithea
 from kallithea.lib.pidlock import DaemonLock, LockHeld
-from kallithea.lib.utils2 import safe_bytes
+from kallithea.lib.utils2 import asbool, safe_bytes
 from kallithea.model import meta
 
 
 log = logging.getLogger(__name__)
 
 
-class FakeTask(object):
-    """Fake a sync result to make it look like a finished task"""
-
-    def __init__(self, result):
-        self.result = result
-
-    def failed(self):
-        return False
-
-    traceback = None # if failed
-
-    task_id = None
-
-
 def task(f_org):
-    """Wrapper of celery.task.task, running async if CELERY_APP
+    """Wrapper of celery.task.task, run at import time, before kallithea.CONFIG has been set, and before kallithea.CELERY_APP has been configured.
     """
 
-    if kallithea.CELERY_APP:
-        def f_async(*args, **kwargs):
-            log.info('executing %s task', f_org.__name__)
+    def f_async(*args, **kwargs):
+        log.info('executing async task %s', f_org.__name__)
+        try:
+            f_org(*args, **kwargs)
+        finally:
+            meta.Session.remove()  # prevent reuse of auto created db sessions
+            log.info('executed async task %s', f_org.__name__)
+
+    runner = kallithea.CELERY_APP.task(name=f_org.__name__, ignore_result=True)(f_async)
+
+    def f_wrapped(*args, **kwargs):
+        if asbool(kallithea.CONFIG.get('use_celery')):
+            t = runner.apply_async(args=args, kwargs=kwargs)
+            log.info('executing async task %s - id %s', f_org.__name__, t.task_id)
+        else:
+            # invoke f_org directly, without the meta.Session.remove in f_async
+            log.info('executing sync task %s', f_org.__name__)
             try:
                 f_org(*args, **kwargs)
-            finally:
-                log.info('executed %s task', f_org.__name__)
-        f_async.__name__ = f_org.__name__
-        runner = kallithea.CELERY_APP.task(ignore_result=True)(f_async)
-
-        def f_wrapped(*args, **kwargs):
-            t = runner.apply_async(args=args, kwargs=kwargs)
-            log.info('executing task %s in async mode - id %s', f_org, t.task_id)
-            return t
-    else:
-        def f_wrapped(*args, **kwargs):
-            log.info('executing task %s in sync', f_org.__name__)
-            try:
-                result = f_org(*args, **kwargs)
             except Exception as e:
-                log.error('exception executing sync task %s in sync: %r', f_org.__name__, e)
-                raise # TODO: return this in FakeTask as with async tasks?
-            return FakeTask(result)
+                log.error('exception executing sync task %s: %r', f_org.__name__, e)
+                raise # TODO: report errors differently ... and consistently between sync and async
 
     return f_wrapped
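
A rough usage sketch (not part of the patch) of the reworked decorator, assuming kallithea.CELERY_APP already exists when the decorated module is imported; send_test_email is a hypothetical task, not one of Kallithea's.

from kallithea.lib import celerylib

@celerylib.task
def send_test_email(recipient):
    # runs in a Celery worker when use_celery is enabled in the ini file,
    # otherwise synchronously in the calling (web) process
    print('would mail %s' % recipient)

# Callers always invoke the task the same way; the wrapper decides between
# apply_async and a direct call based on kallithea.CONFIG.get('use_celery')
# at call time, not at import time.
send_test_email('admin@example.com')
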
 
@@ -94,18 +79,16 @@
     func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
 
     lockkey = 'task_%s.lock' % \
-        md5(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
+        sha1(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
     return lockkey
 
 
 def locked_task(func):
     def __wrapper(func, *fargs, **fkwargs):
         lockkey = __get_lockkey(func, *fargs, **fkwargs)
-        lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir']  # Backward compatibility for TurboGears < 2.4
-
         log.info('running task with lockkey %s', lockkey)
         try:
-            l = DaemonLock(os.path.join(lockkey_path, lockkey))
+            l = DaemonLock(os.path.join(config['cache_dir'], lockkey))
             ret = func(*fargs, **fkwargs)
             l.release()
             return ret
@@ -114,20 +97,3 @@
             return 'Task with key %s already running' % lockkey
 
     return decorator(__wrapper, func)
-
-
-def get_session():
-    sa = meta.Session()
-    return sa
-
-
-def dbsession(func):
-    def __wrapper(func, *fargs, **fkwargs):
-        try:
-            ret = func(*fargs, **fkwargs)
-            return ret
-        finally:
-            if kallithea.CELERY_APP and not kallithea.CELERY_EAGER:
-                meta.Session.remove()
-
-    return decorator(__wrapper, func)
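
A standalone sketch (not part of the patch) of the lock key scheme used by __get_lockkey above, now based on sha1 rather than md5; the function name and arguments below are hypothetical.

from hashlib import sha1

def example_lockkey(func_name, *params):
    # mirrors __get_lockkey: 'task_<sha1 of "name-arg1-arg2-...">.lock'
    digest = sha1(
        ('%s-%s' % (func_name, '-'.join(str(x) for x in params))).encode('utf-8')
    ).hexdigest()
    return 'task_%s.lock' % digest

# e.g. the lock file name DaemonLock would use inside config['cache_dir']
print(example_lockkey('mytask', 'some/repo', 42))
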
--- a/kallithea/lib/celerylib/tasks.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,498 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.celerylib.tasks
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Kallithea task modules, containing all task that suppose to be run
-by celery daemon
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Oct 6, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import email.utils
-import os
-import traceback
-from collections import OrderedDict
-from operator import itemgetter
-from time import mktime
-
-import celery.utils.log
-from tg import config
-
-import kallithea
-from kallithea.lib import celerylib, ext_json
-from kallithea.lib.helpers import person
-from kallithea.lib.hooks import log_create_repository
-from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
-from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import ascii_bytes, str2bool
-from kallithea.lib.vcs.utils import author_email
-from kallithea.model.db import RepoGroup, Repository, Statistics, User
-
-
-__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
-
-
-log = celery.utils.log.get_task_logger(__name__)
-
-
-@celerylib.task
-@celerylib.locked_task
-@celerylib.dbsession
-def whoosh_index(repo_location, full_index):
-    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
-    celerylib.get_session() # initialize database connection
-
-    index_location = config['index_dir']
-    WhooshIndexingDaemon(index_location=index_location,
-                         repo_location=repo_location) \
-                         .run(full_index=full_index)
-
-
-# for js data compatibility cleans the key for person from '
-def akc(k):
-    return person(k).replace('"', '')
-
-
-@celerylib.task
-@celerylib.dbsession
-def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
-    DBS = celerylib.get_session()
-    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
-                            ts_max_y)
-    lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir']  # Backward compatibility for TurboGears < 2.4
-
-    log.info('running task with lockkey %s', lockkey)
-
-    try:
-        lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))
-
-        co_day_auth_aggr = {}
-        commits_by_day_aggregate = {}
-        repo = Repository.get_by_repo_name(repo_name)
-        if repo is None:
-            return True
-
-        repo = repo.scm_instance
-        repo_size = repo.count()
-        # return if repo have no revisions
-        if repo_size < 1:
-            lock.release()
-            return True
-
-        skip_date_limit = True
-        parse_limit = int(config.get('commit_parse_limit'))
-        last_rev = None
-        last_cs = None
-        timegetter = itemgetter('time')
-
-        dbrepo = DBS.query(Repository) \
-            .filter(Repository.repo_name == repo_name).scalar()
-        cur_stats = DBS.query(Statistics) \
-            .filter(Statistics.repository == dbrepo).scalar()
-
-        if cur_stats is not None:
-            last_rev = cur_stats.stat_on_revision
-
-        if last_rev == repo.get_changeset().revision and repo_size > 1:
-            # pass silently without any work if we're not on first revision or
-            # current state of parsing revision(from db marker) is the
-            # last revision
-            lock.release()
-            return True
-
-        if cur_stats:
-            commits_by_day_aggregate = OrderedDict(ext_json.loads(
-                                        cur_stats.commit_activity_combined))
-            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
-
-        log.debug('starting parsing %s', parse_limit)
-
-        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
-        log.debug('Getting revisions from %s to %s',
-             last_rev, last_rev + parse_limit
-        )
-        for cs in repo[last_rev:last_rev + parse_limit]:
-            log.debug('parsing %s', cs)
-            last_cs = cs  # remember last parsed changeset
-            tt = cs.date.timetuple()
-            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
-
-            if akc(cs.author) in co_day_auth_aggr:
-                try:
-                    l = [timegetter(x) for x in
-                         co_day_auth_aggr[akc(cs.author)]['data']]
-                    time_pos = l.index(k)
-                except ValueError:
-                    time_pos = None
-
-                if time_pos is not None and time_pos >= 0:
-                    datadict = \
-                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
-
-                    datadict["commits"] += 1
-                    datadict["added"] += len(cs.added)
-                    datadict["changed"] += len(cs.changed)
-                    datadict["removed"] += len(cs.removed)
-
-                else:
-                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
-
-                        datadict = {"time": k,
-                                    "commits": 1,
-                                    "added": len(cs.added),
-                                    "changed": len(cs.changed),
-                                    "removed": len(cs.removed),
-                                   }
-                        co_day_auth_aggr[akc(cs.author)]['data'] \
-                            .append(datadict)
-
-            else:
-                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
-                    co_day_auth_aggr[akc(cs.author)] = {
-                                        "label": akc(cs.author),
-                                        "data": [{"time": k,
-                                                 "commits": 1,
-                                                 "added": len(cs.added),
-                                                 "changed": len(cs.changed),
-                                                 "removed": len(cs.removed),
-                                                 }],
-                                        "schema": ["commits"],
-                                        }
-
-            # gather all data by day
-            if k in commits_by_day_aggregate:
-                commits_by_day_aggregate[k] += 1
-            else:
-                commits_by_day_aggregate[k] = 1
-
-        overview_data = sorted(commits_by_day_aggregate.items(),
-                               key=itemgetter(0))
-
-        if not co_day_auth_aggr:
-            co_day_auth_aggr[akc(repo.contact)] = {
-                "label": akc(repo.contact),
-                "data": [0, 1],
-                "schema": ["commits"],
-            }
-
-        stats = cur_stats if cur_stats else Statistics()
-        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
-        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
-
-        log.debug('last revision %s', last_rev)
-        leftovers = len(repo.revisions[last_rev:])
-        log.debug('revisions to parse %s', leftovers)
-
-        if last_rev == 0 or leftovers < parse_limit:
-            log.debug('getting code trending stats')
-            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
-
-        try:
-            stats.repository = dbrepo
-            stats.stat_on_revision = last_cs.revision if last_cs else 0
-            DBS.add(stats)
-            DBS.commit()
-        except:
-            log.error(traceback.format_exc())
-            DBS.rollback()
-            lock.release()
-            return False
-
-        # final release
-        lock.release()
-
-        # execute another task if celery is enabled
-        if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
-            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
-        elif recurse_limit <= 0:
-            log.debug('Not recursing - limit has been reached')
-        else:
-            log.debug('Not recursing')
-    except celerylib.LockHeld:
-        log.info('Task with key %s already running', lockkey)
-        return 'Task with key %s already running' % lockkey
-
-
-@celerylib.task
-@celerylib.dbsession
-def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
-    """
-    Sends an email with defined parameters from the .ini files.
-
-    :param recipients: list of recipients, if this is None, the defined email
-        address from field 'email_to' and all admins is used instead
-    :param subject: subject of the mail
-    :param body: body of the mail
-    :param html_body: html version of body
-    :param headers: dictionary of prepopulated e-mail headers
-    :param from_name: full name to be used as sender of this mail - often a
-    .full_name_or_username value
-    """
-    assert isinstance(recipients, list), recipients
-    if headers is None:
-        headers = {}
-    else:
-        # do not modify the original headers object passed by the caller
-        headers = headers.copy()
-
-    email_config = config
-    email_prefix = email_config.get('email_prefix', '')
-    if email_prefix:
-        subject = "%s %s" % (email_prefix, subject)
-
-    if not recipients:
-        # if recipients are not defined we send to email_config + all admins
-        recipients = [u.email for u in User.query()
-                      .filter(User.admin == True).all()]
-        if email_config.get('email_to') is not None:
-            recipients += email_config.get('email_to').split(',')
-
-        # If there are still no recipients, there are no admins and no address
-        # configured in email_to, so return.
-        if not recipients:
-            log.error("No recipients specified and no fallback available.")
-            return False
-
-        log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
-
-    # SMTP sender
-    envelope_from = email_config.get('app_email_from', 'Kallithea')
-    # 'From' header
-    if from_name is not None:
-        # set From header based on from_name but with a generic e-mail address
-        # In case app_email_from is in "Some Name <e-mail>" format, we first
-        # extract the e-mail address.
-        envelope_addr = author_email(envelope_from)
-        headers['From'] = '"%s" <%s>' % (
-            email.utils.quote('%s (no-reply)' % from_name),
-            envelope_addr)
-
-    user = email_config.get('smtp_username')
-    passwd = email_config.get('smtp_password')
-    mail_server = email_config.get('smtp_server')
-    mail_port = email_config.get('smtp_port')
-    tls = str2bool(email_config.get('smtp_use_tls'))
-    ssl = str2bool(email_config.get('smtp_use_ssl'))
-    debug = str2bool(email_config.get('debug'))
-    smtp_auth = email_config.get('smtp_auth')
-
-    logmsg = ("Mail details:\n"
-              "recipients: %s\n"
-              "headers: %s\n"
-              "subject: %s\n"
-              "body:\n%s\n"
-              "html:\n%s\n"
-              % (' '.join(recipients), headers, subject, body, html_body))
-
-    if mail_server:
-        log.debug("Sending e-mail. " + logmsg)
-    else:
-        log.error("SMTP mail server not configured - cannot send e-mail.")
-        log.warning(logmsg)
-        return False
-
-    try:
-        m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth,
-                       mail_port, ssl, tls, debug=debug)
-        m.send(recipients, subject, body, html_body, headers=headers)
-    except:
-        log.error('Mail sending failed')
-        log.error(traceback.format_exc())
-        return False
-    return True
-
-
-@celerylib.task
-@celerylib.dbsession
-def create_repo(form_data, cur_user):
-    from kallithea.model.repo import RepoModel
-    from kallithea.model.db import Setting
-
-    DBS = celerylib.get_session()
-
-    cur_user = User.guess_instance(cur_user)
-
-    owner = cur_user
-    repo_name = form_data['repo_name']
-    repo_name_full = form_data['repo_name_full']
-    repo_type = form_data['repo_type']
-    description = form_data['repo_description']
-    private = form_data['repo_private']
-    clone_uri = form_data.get('clone_uri')
-    repo_group = form_data['repo_group']
-    landing_rev = form_data['repo_landing_rev']
-    copy_fork_permissions = form_data.get('copy_permissions')
-    copy_group_permissions = form_data.get('repo_copy_permissions')
-    fork_of = form_data.get('fork_parent_id')
-    state = form_data.get('repo_state', Repository.STATE_PENDING)
-
-    # repo creation defaults, private and repo_type are filled in form
-    defs = Setting.get_default_repo_settings(strip_prefix=True)
-    enable_statistics = defs.get('repo_enable_statistics')
-    enable_downloads = defs.get('repo_enable_downloads')
-
-    try:
-        repo = RepoModel()._create_repo(
-            repo_name=repo_name_full,
-            repo_type=repo_type,
-            description=description,
-            owner=owner,
-            private=private,
-            clone_uri=clone_uri,
-            repo_group=repo_group,
-            landing_rev=landing_rev,
-            fork_of=fork_of,
-            copy_fork_permissions=copy_fork_permissions,
-            copy_group_permissions=copy_group_permissions,
-            enable_statistics=enable_statistics,
-            enable_downloads=enable_downloads,
-            state=state
-        )
-
-        action_logger(cur_user, 'user_created_repo',
-                      form_data['repo_name_full'], '')
-
-        DBS.commit()
-        # now create this repo on Filesystem
-        RepoModel()._create_filesystem_repo(
-            repo_name=repo_name,
-            repo_type=repo_type,
-            repo_group=RepoGroup.guess_instance(repo_group),
-            clone_uri=clone_uri,
-        )
-        repo = Repository.get_by_repo_name(repo_name_full)
-        log_create_repository(repo.get_dict(), created_by=owner.username)
-
-        # update repo changeset caches initially
-        repo.update_changeset_cache()
-
-        # set new created state
-        repo.set_state(Repository.STATE_CREATED)
-        DBS.commit()
-    except Exception as e:
-        log.warning('Exception %s occurred when forking repository, '
-                    'doing cleanup...' % e)
-        # rollback things manually !
-        repo = Repository.get_by_repo_name(repo_name_full)
-        if repo:
-            Repository.delete(repo.repo_id)
-            DBS.commit()
-            RepoModel()._delete_filesystem_repo(repo)
-        raise
-
-    return True
-
-
-@celerylib.task
-@celerylib.dbsession
-def create_repo_fork(form_data, cur_user):
-    """
-    Creates a fork of repository using interval VCS methods
-
-    :param form_data:
-    :param cur_user:
-    """
-    from kallithea.model.repo import RepoModel
-
-    DBS = celerylib.get_session()
-
-    base_path = kallithea.CONFIG['base_path']
-    cur_user = User.guess_instance(cur_user)
-
-    repo_name = form_data['repo_name']  # fork in this case
-    repo_name_full = form_data['repo_name_full']
-
-    repo_type = form_data['repo_type']
-    owner = cur_user
-    private = form_data['private']
-    clone_uri = form_data.get('clone_uri')
-    repo_group = form_data['repo_group']
-    landing_rev = form_data['landing_rev']
-    copy_fork_permissions = form_data.get('copy_permissions')
-
-    try:
-        fork_of = Repository.guess_instance(form_data.get('fork_parent_id'))
-
-        RepoModel()._create_repo(
-            repo_name=repo_name_full,
-            repo_type=repo_type,
-            description=form_data['description'],
-            owner=owner,
-            private=private,
-            clone_uri=clone_uri,
-            repo_group=repo_group,
-            landing_rev=landing_rev,
-            fork_of=fork_of,
-            copy_fork_permissions=copy_fork_permissions
-        )
-        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
-                      fork_of.repo_name, '')
-        DBS.commit()
-
-        source_repo_path = os.path.join(base_path, fork_of.repo_name)
-
-        # now create this repo on Filesystem
-        RepoModel()._create_filesystem_repo(
-            repo_name=repo_name,
-            repo_type=repo_type,
-            repo_group=RepoGroup.guess_instance(repo_group),
-            clone_uri=source_repo_path,
-        )
-        repo = Repository.get_by_repo_name(repo_name_full)
-        log_create_repository(repo.get_dict(), created_by=owner.username)
-
-        # update repo changeset caches initially
-        repo.update_changeset_cache()
-
-        # set new created state
-        repo.set_state(Repository.STATE_CREATED)
-        DBS.commit()
-    except Exception as e:
-        log.warning('Exception %s occurred when forking repository, '
-                    'doing cleanup...' % e)
-        # rollback things manually !
-        repo = Repository.get_by_repo_name(repo_name_full)
-        if repo:
-            Repository.delete(repo.repo_id)
-            DBS.commit()
-            RepoModel()._delete_filesystem_repo(repo)
-        raise
-
-    return True
-
-
-def __get_codes_stats(repo_name):
-    from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
-    repo = Repository.get_by_repo_name(repo_name).scm_instance
-
-    tip = repo.get_changeset()
-    code_stats = {}
-
-    for _topnode, _dirnodes, filenodes in tip.walk('/'):
-        for filenode in filenodes:
-            ext = filenode.extension.lower()
-            if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
-                if ext in code_stats:
-                    code_stats[ext] += 1
-                else:
-                    code_stats[ext] = 1
-
-    return code_stats or {}
--- a/kallithea/lib/celerypylons/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-Kallithea wrapper of Celery
-
-The Celery configuration is in the Kallithea ini file but must be converted to an
-entirely different format before Celery can use it.
-
-We read the configuration from tg.config at module import time. This module can
-thus not be imported in global scope but must be imported on demand in function
-scope after tg.config has been initialized.
-
-To make sure that the config really has been initialized, we check one of the
-mandatory settings.
-"""
-
-import logging
-
-import celery
-import tg
-
-import kallithea
-
-
-class CeleryConfig(object):
-    imports = ['kallithea.lib.celerylib.tasks']
-    task_always_eager = False
-
-# map from Kallithea .ini Celery 3 config names to Celery 4 config names
-celery3_compat = {
-    'broker.url': 'broker_url',
-    'celery.accept.content': 'accept_content',
-    'celery.always.eager': 'task_always_eager',
-    'celery.amqp.task.result.expires': 'result_expires',
-    'celeryd.concurrency': 'worker_concurrency',
-    'celeryd.max.tasks.per.child': 'worker_max_tasks_per_child',
-    #'celery.imports' ends up unchanged
-    'celery.result.backend': 'result_backend',
-    'celery.result.serializer': 'result_serializer',
-    'celery.task.serializer': 'task_serializer',
-}
-
-list_config_names = """imports accept_content""".split()
-
-
-desupported = set([
-    'celery.result.dburi',
-    'celery.result.serialier',
-    'celery.send.task.error.emails',
-])
-
-
-log = logging.getLogger(__name__)
-
-
-def make_celery_config(config):
-    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
-
-    celery_config = CeleryConfig()
-
-    for config_key, config_value in sorted(config.items()):
-        if config_key in desupported and config_value:
-            log.error('Celery configuration setting %r is no longer supported', config_key)
-        celery_key = celery3_compat.get(config_key)
-        parts = config_key.split('.', 1)
-        if celery_key:  # explicit Celery 3 backwards compatibility
-            pass
-        elif parts[0] == 'celery' and len(parts) == 2:  # Celery 4 config key
-            celery_key = parts[1]
-        else:
-            continue
-        if not isinstance(config_value, str):
-            continue
-        if celery_key in list_config_names:
-            celery_value = config_value.split()
-        elif config_value.isdigit():
-            celery_value = int(config_value)
-        elif config_value.lower() in ['true', 'false']:
-            celery_value = config_value.lower() == 'true'
-        else:
-            celery_value = config_value
-        setattr(celery_config, celery_key, celery_value)
-    return celery_config
-
-
-def make_app():
-    """Create celery app from the TurboGears configuration file"""
-    app = celery.Celery()
-    celery_config = make_celery_config(tg.config)
-    kallithea.CELERY_EAGER = celery_config.task_always_eager
-    app.config_from_object(celery_config)
-    return app
--- a/kallithea/lib/colored_formatter.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/colored_formatter.py	Thu May 27 21:27:37 2021 +0200
@@ -13,6 +13,7 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import logging
+import sys
 
 
 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
@@ -65,15 +66,18 @@
     def __init__(self, *args, **kwargs):
         # can't do super(...) here because Formatter is an old school class
         logging.Formatter.__init__(self, *args, **kwargs)
+        self.plain = not getattr(sys.stderr, 'isatty', lambda: False)()
 
     def format(self, record):
         """
         Changes record's levelname to use with COLORS enum
         """
+        def_record = logging.Formatter.format(self, record)
+        if self.plain:
+            return def_record
 
         levelname = record.levelname
         start = COLOR_SEQ % (COLORS[levelname])
-        def_record = logging.Formatter.format(self, record)
         end = RESET_SEQ
 
         colored_record = ''.join([start, def_record, end])
@@ -85,14 +89,17 @@
     def __init__(self, *args, **kwargs):
         # can't do super(...) here because Formatter is an old school class
         logging.Formatter.__init__(self, *args, **kwargs)
+        self.plain = not getattr(sys.stderr, 'isatty', lambda: False)()
 
     def format(self, record):
         """
         Changes record's levelname to use with COLORS enum
         """
+        def_record = format_sql(logging.Formatter.format(self, record))
+        if self.plain:
+            return def_record
 
         start = COLOR_SEQ % (COLORS['SQL'])
-        def_record = format_sql(logging.Formatter.format(self, record))
         end = RESET_SEQ
 
         colored_record = ''.join([start, def_record, end])
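
A standalone sketch (not part of the patch) of the isatty guard both formatters gain above; the escape sequences are the usual ANSI codes and may not match colored_formatter.py's constants exactly.

import sys

RESET_SEQ = "\033[0m"        # assumed ANSI reset, as in typical colored loggers
COLOR_SEQ = "\033[1;%dm"     # assumed ANSI color prefix
ERROR_COLOR = 31             # RED, per the range(30, 38) constants above

def maybe_colored(text, color=ERROR_COLOR):
    # only color when stderr is an interactive terminal, so that logs
    # redirected to files or pipes stay free of escape sequences
    if not getattr(sys.stderr, 'isatty', lambda: False)():
        return text
    return (COLOR_SEQ % color) + text + RESET_SEQ

print(maybe_colored('ERROR: something went wrong'), file=sys.stderr)
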
--- a/kallithea/lib/compat.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/compat.py	Thu May 27 21:27:37 2021 +0200
@@ -56,7 +56,7 @@
 
     def kill(pid, sig):
         """kill function for Win32"""
-        kernel32 = ctypes.windll.kernel32
+        kernel32 = ctypes.windll.kernel32  # pytype: disable=module-attr
         handle = kernel32.OpenProcess(1, 0, pid)
         return (0 != kernel32.TerminateProcess(handle, 0))
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/conf.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.lib.conf
+~~~~~~~~~~~~~~~~~~
+
+Various config settings for Kallithea
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Mar 7, 2012
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+from kallithea.lib import pygmentsutils
+
+
+# the language map is also used by the whoosh indexer, which will index the
+# content of files with the specified extensions
+LANGUAGES_EXTENSIONS_MAP = pygmentsutils.get_extension_descriptions()
+
+# Whoosh index targets
+
+# Extensions we want to index content of using whoosh
+INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP)
+
+# Filenames we want to index content of using whoosh
+INDEX_FILENAMES = pygmentsutils.get_index_filenames()
+
+# list of readme files to search for in the file tree and display in the summary;
+# the attached weights define the search order; lower is first
+ALL_READMES = [
+    ('readme', 0), ('README', 0), ('Readme', 0),
+    ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
+    ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
+    ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
+    ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
+]
+
+# extensions together with weights to search; lower is first
+RST_EXTS = [
+    ('', 0), ('.rst', 1), ('.rest', 1),
+    ('.RST', 2), ('.REST', 2),
+    ('.txt', 3), ('.TXT', 3)
+]
+
+MARKDOWN_EXTS = [
+    ('.md', 1), ('.MD', 1),
+    ('.mkdn', 2), ('.MKDN', 2),
+    ('.mdown', 3), ('.MDOWN', 3),
+    ('.markdown', 4), ('.MARKDOWN', 4)
+]
+
+PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)]
+
+ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS
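
A sketch (not part of the patch) of how a consumer might rank readme candidates using the weights above; the actual lookup lives elsewhere in Kallithea, so combining path and extension weights by addition is an assumption.

# Trimmed copies of the lists above, enough to show the ordering.
ALL_READMES = [('readme', 0), ('README', 0), ('doc/README', 1), ('docs/README', 2)]
ALL_EXTS = [('', 0), ('.md', 1), ('.rst', 1), ('.txt', 3)]

candidates = sorted(
    (path_weight + ext_weight, path + ext)
    for path, path_weight in ALL_READMES
    for ext, ext_weight in ALL_EXTS
)
# lower combined weight first; the first candidate that exists in the
# repository tree would be the one shown on the summary page
print([name for _weight, name in candidates[:5]])
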
--- a/kallithea/lib/db_manage.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/db_manage.py	Thu May 27 21:27:37 2021 +0200
@@ -26,6 +26,7 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
+import getpass
 import logging
 import os
 import sys
@@ -36,12 +37,10 @@
 import sqlalchemy
 from sqlalchemy.engine import create_engine
 
+from kallithea.lib.utils2 import ask_ok
+from kallithea.model import db, meta
 from kallithea.model.base import init_model
-from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm
-#from kallithea.model import meta
-from kallithea.model.meta import Base, Session
 from kallithea.model.permission import PermissionModel
-from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user import UserModel
 
 
@@ -49,9 +48,8 @@
 
 
 class DbManage(object):
-    def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None):
+    def __init__(self, dbconf, root, SESSION=None, cli_args=None):
         self.dbname = dbconf.split('/')[-1]
-        self.tests = tests
         self.root = root
         self.dburi = dbconf
         self.cli_args = cli_args or {}
@@ -62,7 +60,6 @@
         force_ask = self.cli_args.get('force_ask')
         if force_ask is not None:
             return force_ask
-        from kallithea.lib.utils2 import ask_ok
         return ask_ok(msg)
 
     def init_db(self, SESSION=None):
@@ -72,48 +69,49 @@
             # init new sessions
             engine = create_engine(self.dburi)
             init_model(engine)
-            self.sa = Session()
-
-    def create_tables(self, override=False):
-        """
-        Create a auth database
-        """
+            self.sa = meta.Session()
 
-        log.info("Any existing database is going to be destroyed")
-        if self.tests:
-            destroy = True
+    def create_tables(self, reuse_database=False):
+        """
+        Create the database (optionally) and its tables.
+        If reuse_database is false, the database will be dropped (if it exists)
+        and a new one created. If true, the existing database will be reused
+        and cleared of its existing content.
+        """
+        url = sqlalchemy.engine.url.make_url(self.dburi)
+        database = url.database
+        if reuse_database:
+            log.info("The content of the database %r will be destroyed and new tables created." % database)
         else:
-            destroy = self._ask_ok('Are you sure to destroy old database ? [y/n]')
-        if not destroy:
+            log.info("The existing database %r will be destroyed and a new one created." % database)
+
+        if not self._ask_ok('Are you sure you want to destroy the old database? [y/n]'):
             print('Nothing done.')
             sys.exit(0)
-        if destroy:
-            # drop and re-create old schemas
 
-            url = sqlalchemy.engine.url.make_url(self.dburi)
-            database = url.database
-
-            # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work
+        if reuse_database:
+            meta.Base.metadata.drop_all()
+        else:
             if url.drivername == 'mysql':
                 url.database = None  # don't connect to the database (it might not exist)
                 engine = sqlalchemy.create_engine(url)
                 with engine.connect() as conn:
-                    conn.execute('DROP DATABASE IF EXISTS ' + database)
-                    conn.execute('CREATE DATABASE ' + database)
+                    conn.execute('DROP DATABASE IF EXISTS `%s`' % database)
+                    conn.execute('CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci' % database)
             elif url.drivername == 'postgresql':
                 from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
                 url.database = 'postgres'  # connect to the system database (as the real one might not exist)
                 engine = sqlalchemy.create_engine(url)
                 with engine.connect() as conn:
                     conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
-                    conn.execute('DROP DATABASE IF EXISTS ' + database)
-                    conn.execute('CREATE DATABASE ' + database)
+                    conn.execute('DROP DATABASE IF EXISTS "%s"' % database)
+                    conn.execute('CREATE DATABASE "%s"' % database)
             else:
+                # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work, but this is
                 # known to work on SQLite - possibly not on other databases with strong referential integrity
-                Base.metadata.drop_all()
+                meta.Base.metadata.drop_all()
 
-        checkfirst = not override
-        Base.metadata.create_all(checkfirst=checkfirst)
+        meta.Base.metadata.create_all(checkfirst=False)
 
         # Create an Alembic configuration and generate the version table,
         # "stamping" it with the most recent Alembic migration revision, to
@@ -128,91 +126,36 @@
 
         log.info('Created tables for %s', self.dbname)
 
-    def fix_repo_paths(self):
-        """
-        Fixes a old kallithea version path into new one without a '*'
-        """
-
-        paths = Ui.query() \
-                .filter(Ui.ui_key == '/') \
-                .scalar()
-
-        paths.ui_value = paths.ui_value.replace('*', '')
-
-        self.sa.commit()
+    def create_admin_user(self):
+        username = self.cli_args.get('username')
+        password = self.cli_args.get('password')
+        email = self.cli_args.get('email')
 
-    def fix_default_user(self):
-        """
-        Fixes a old default user with some 'nicer' default values,
-        used mostly for anonymous access
-        """
-        def_user = User.query().filter_by(is_default_user=True).one()
-
-        def_user.name = 'Anonymous'
-        def_user.lastname = 'User'
-        def_user.email = 'anonymous@kallithea-scm.org'
-
-        self.sa.commit()
+        def get_password():
+            password = getpass.getpass('Specify admin password '
+                                       '(min 6 chars):')
+            confirm = getpass.getpass('Confirm password:')
 
-    def fix_settings(self):
-        """
-        Fixes kallithea settings adds ga_code key for google analytics
-        """
-
-        hgsettings3 = Setting('ga_code', '')
-
-        self.sa.add(hgsettings3)
-        self.sa.commit()
-
-    def admin_prompt(self, second=False):
-        if not self.tests:
-            import getpass
+            if password != confirm:
+                log.error('passwords do not match')
+                return False
+            if len(password) < 6:
+                log.error('password is too short, use at least 6 characters')
+                return False
 
-            username = self.cli_args.get('username')
-            password = self.cli_args.get('password')
-            email = self.cli_args.get('email')
-
-            def get_password():
-                password = getpass.getpass('Specify admin password '
-                                           '(min 6 chars):')
-                confirm = getpass.getpass('Confirm password:')
-
-                if password != confirm:
-                    log.error('passwords mismatch')
-                    return False
-                if len(password) < 6:
-                    log.error('password is to short use at least 6 characters')
-                    return False
-
-                return password
-            if username is None:
-                username = input('Specify admin username:')
-            if password is None:
+            return password
+        if username is None:
+            username = input('Specify admin username:')
+        if password is None:
+            password = get_password()
+            if not password:
+                # second try
                 password = get_password()
                 if not password:
-                    # second try
-                    password = get_password()
-                    if not password:
-                        sys.exit()
-            if email is None:
-                email = input('Specify admin email:')
-            self.create_user(username, password, email, True)
-        else:
-            log.info('creating admin and regular test users')
-            from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \
-                TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
-                TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
-                TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
-                TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
-
-            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
-                             TEST_USER_ADMIN_EMAIL, True)
-
-            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
-                             TEST_USER_REGULAR_EMAIL, False)
-
-            self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
-                             TEST_USER_REGULAR2_EMAIL, False)
+                    sys.exit()
+        if email is None:
+            email = input('Specify admin email:')
+        self.create_user(username, password, email, True)
 
     def create_auth_plugin_options(self, skip_existing=False):
         """
@@ -223,10 +166,10 @@
 
         for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
                         ('auth_internal_enabled', 'True', 'bool')]:
-            if skip_existing and Setting.get_by_name(k) is not None:
+            if skip_existing and db.Setting.get_by_name(k) is not None:
                 log.debug('Skipping option %s', k)
                 continue
-            setting = Setting(k, v, t)
+            setting = db.Setting(k, v, t)
             self.sa.add(setting)
 
     def create_default_options(self, skip_existing=False):
@@ -238,51 +181,12 @@
             ('default_repo_private', False, 'bool'),
             ('default_repo_type', 'hg', 'unicode')
         ]:
-            if skip_existing and Setting.get_by_name(k) is not None:
+            if skip_existing and db.Setting.get_by_name(k) is not None:
                 log.debug('Skipping option %s', k)
                 continue
-            setting = Setting(k, v, t)
+            setting = db.Setting(k, v, t)
             self.sa.add(setting)
 
-    def fixup_groups(self):
-        def_usr = User.get_default_user()
-        for g in RepoGroup.query().all():
-            g.group_name = g.get_new_name(g.name)
-            # get default perm
-            default = UserRepoGroupToPerm.query() \
-                .filter(UserRepoGroupToPerm.group == g) \
-                .filter(UserRepoGroupToPerm.user == def_usr) \
-                .scalar()
-
-            if default is None:
-                log.debug('missing default permission for group %s adding', g)
-                RepoGroupModel()._create_default_perms(g)
-
-    def reset_permissions(self, username):
-        """
-        Resets permissions to default state, useful when old systems had
-        bad permissions, we must clean them up
-
-        :param username:
-        """
-        default_user = User.get_by_username(username)
-        if not default_user:
-            return
-
-        u2p = UserToPerm.query() \
-            .filter(UserToPerm.user == default_user).all()
-        fixed = False
-        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
-            for p in u2p:
-                Session().delete(p)
-            fixed = True
-            self.populate_default_permissions()
-        return fixed
-
-    def update_repo_info(self):
-        for repo in Repository.query():
-            repo.update_changeset_cache()
-
     def prompt_repo_root_path(self, test_repo_path='', retries=3):
         _path = self.cli_args.get('repos_location')
         if retries == 3:
@@ -290,7 +194,7 @@
 
         if _path is not None:
             path = _path
-        elif not self.tests and not test_repo_path:
+        elif not test_repo_path:
             path = input(
                  'Enter a valid absolute path to store repositories. '
                  'All repositories in that path will be added automatically:'
@@ -340,15 +244,14 @@
         ui_config = [
             ('paths', '/', repo_root_path, True),
             #('phases', 'publish', 'false', False)
-            ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False),
-            ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True),
+            ('hooks', db.Ui.HOOK_UPDATE, 'python:', False),  # the actual value in db doesn't matter
+            ('hooks', db.Ui.HOOK_REPO_SIZE, 'python:', True),  # the actual value in db doesn't matter
             ('extensions', 'largefiles', '', True),
             ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True),
-            ('extensions', 'hgsubversion', '', False),
             ('extensions', 'hggit', '', False),
         ]
         for ui_section, ui_key, ui_value, ui_active in ui_config:
-            ui_conf = Ui(
+            ui_conf = db.Ui(
                 ui_section=ui_section,
                 ui_key=ui_key,
                 ui_value=ui_value,
@@ -366,12 +269,12 @@
             ('admin_grid_items', 25, 'int'),
             ('show_version', True, 'bool'),
             ('use_gravatar', True, 'bool'),
-            ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
-            ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
-            ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'),
+            ('gravatar_url', db.User.DEFAULT_GRAVATAR_URL, 'unicode'),
+            ('clone_uri_tmpl', db.Repository.DEFAULT_CLONE_URI, 'unicode'),
+            ('clone_ssh_tmpl', db.Repository.DEFAULT_CLONE_SSH, 'unicode'),
         ]
         for key, val, type_ in settings:
-            sett = Setting(key, val, type_)
+            sett = db.Setting(key, val, type_)
             self.sa.add(sett)
 
         self.create_auth_plugin_options()
@@ -384,12 +287,12 @@
         UserModel().create_or_update(username, password, email,
                                      firstname='Kallithea', lastname='Admin',
                                      active=True, admin=admin,
-                                     extern_type=User.DEFAULT_AUTH_TYPE)
+                                     extern_type=db.User.DEFAULT_AUTH_TYPE)
 
     def create_default_user(self):
         log.info('creating default user')
         # create default user for handling default permissions.
-        user = UserModel().create_or_update(username=User.DEFAULT_USER_NAME,
+        user = UserModel().create_or_update(username=db.User.DEFAULT_USER_NAME,
                                             password=str(uuid.uuid1())[:20],
                                             email='anonymous@kallithea-scm.org',
                                             firstname='Anonymous',
@@ -399,7 +302,7 @@
         if self.cli_args.get('public_access') is False:
             log.info('Public access disabled')
             user.active = False
-            Session().commit()
+            meta.Session().commit()
 
     def create_permissions(self):
         """
@@ -416,4 +319,4 @@
         permissions that are missing, and not alter already defined ones
         """
         log.info('creating default user permissions')
-        PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME)
+        PermissionModel().create_default_permissions(user=db.User.DEFAULT_USER_NAME)
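
A sketch (not part of the patch) of the statements the reworked create_tables earlier in this file issues when the database is recreated rather than reused; 'kallithea' is a placeholder database name.

database = 'kallithea'  # placeholder

mysql_statements = [
    'DROP DATABASE IF EXISTS `%s`' % database,
    'CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci' % database,
]
postgresql_statements = [
    'DROP DATABASE IF EXISTS "%s"' % database,
    'CREATE DATABASE "%s"' % database,
]
# Backticks (MySQL) and double quotes (PostgreSQL) quote the database name;
# SQLite has no server-side database to drop, so meta.Base.metadata.drop_all()
# is used instead.
print(mysql_statements)
print(postgresql_statements)
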
--- a/kallithea/lib/diffs.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/diffs.py	Thu May 27 21:27:37 2021 +0200
@@ -31,7 +31,7 @@
 
 from tg.i18n import ugettext as _
 
-from kallithea.lib import helpers as h
+from kallithea.lib import webutils
 from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import VCSError
@@ -63,96 +63,84 @@
     return idstring
 
 
-def as_html(table_class='code-difftable', line_class='line',
+def as_html(parsed_lines, table_class='code-difftable', line_class='line',
             old_lineno_class='lineno old', new_lineno_class='lineno new',
             no_lineno_class='lineno',
-            code_class='code', enable_comments=False, parsed_lines=None):
+            code_class='code'):
     """
     Return given diff as html table with customized css classes
     """
-    def _link_to_if(condition, label, url):
-        """
-        Generates a link if condition is meet or just the label if not.
-        """
-
-        if condition:
-            return '''<a href="%(url)s" data-pseudo-content="%(label)s"></a>''' % {
-                'url': url,
-                'label': label
-            }
-        else:
-            return label
-
     _html_empty = True
     _html = []
     _html.append('''<table class="%(table_class)s">\n''' % {
         'table_class': table_class
     })
 
-    for diff in parsed_lines:
-        for line in diff['chunks']:
+    for file_info in parsed_lines:
+        for chunk in file_info['chunks']:
             _html_empty = False
-            for change in line:
+            for change in chunk:
                 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
                     'lc': line_class,
                     'action': change['action']
                 })
-                anchor_old_id = ''
-                anchor_new_id = ''
-                anchor_old = "%(filename)s_o%(oldline_no)s" % {
-                    'filename': _safe_id(diff['filename']),
-                    'oldline_no': change['old_lineno']
-                }
-                anchor_new = "%(filename)s_n%(oldline_no)s" % {
-                    'filename': _safe_id(diff['filename']),
-                    'oldline_no': change['new_lineno']
-                }
-                cond_old = (change['old_lineno'] != '...' and
-                            change['old_lineno'])
-                cond_new = (change['new_lineno'] != '...' and
-                            change['new_lineno'])
-                no_lineno = (change['old_lineno'] == '...' and
-                             change['new_lineno'] == '...')
-                if cond_old:
-                    anchor_old_id = 'id="%s"' % anchor_old
-                if cond_new:
-                    anchor_new_id = 'id="%s"' % anchor_new
-                ###########################################################
-                # OLD LINE NUMBER
-                ###########################################################
-                _html.append('''\t<td %(a_id)s class="%(olc)s" %(colspan)s>''' % {
-                    'a_id': anchor_old_id,
-                    'olc': no_lineno_class if no_lineno else old_lineno_class,
-                    'colspan': 'colspan="2"' if no_lineno else ''
-                })
-
-                _html.append('''%(link)s''' % {
-                    'link': _link_to_if(not no_lineno, change['old_lineno'],
-                                        '#%s' % anchor_old)
-                })
-                _html.append('''</td>\n''')
-                ###########################################################
-                # NEW LINE NUMBER
-                ###########################################################
-
-                if not no_lineno:
+                if change['old_lineno'] or change['new_lineno']:
+                    ###########################################################
+                    # OLD LINE NUMBER
+                    ###########################################################
+                    anchor_old = "%(filename)s_o%(oldline_no)s" % {
+                        'filename': _safe_id(file_info['filename']),
+                        'oldline_no': change['old_lineno']
+                    }
+                    anchor_old_id = ''
+                    if change['old_lineno']:
+                        anchor_old_id = 'id="%s"' % anchor_old
+                    _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
+                        'a_id': anchor_old_id,
+                        'olc': old_lineno_class,
+                    })
+                    _html.append('''<a href="%(url)s" data-pseudo-content="%(label)s"></a>''' % {
+                        'label': change['old_lineno'],
+                        'url': '#%s' % anchor_old,
+                    })
+                    _html.append('''</td>\n''')
+                    ###########################################################
+                    # NEW LINE NUMBER
+                    ###########################################################
+                    anchor_new = "%(filename)s_n%(newline_no)s" % {
+                        'filename': _safe_id(file_info['filename']),
+                        'newline_no': change['new_lineno']
+                    }
+                    anchor_new_id = ''
+                    if change['new_lineno']:
+                        anchor_new_id = 'id="%s"' % anchor_new
                     _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
                         'a_id': anchor_new_id,
                         'nlc': new_lineno_class
                     })
-
-                    _html.append('''%(link)s''' % {
-                        'link': _link_to_if(True, change['new_lineno'],
-                                            '#%s' % anchor_new)
+                    _html.append('''<a href="%(url)s" data-pseudo-content="%(label)s"></a>''' % {
+                        'label': change['new_lineno'],
+                        'url': '#%s' % anchor_new,
+                    })
+                    _html.append('''</td>\n''')
+                else:
+                    ###########################################################
+                    # NO LINE NUMBER
+                    ###########################################################
+                    anchor = "%(filename)s_%(context_lineno)s" % {
+                        'filename': _safe_id(file_info['filename']),
+                        'context_lineno': change['context_lineno'],
+                    }
+                    _html.append('''\t<td id="%(anchor)s" class="%(olc)s" colspan="2">''' % {
+                        'anchor': anchor,
+                        'olc': no_lineno_class,
                     })
                     _html.append('''</td>\n''')
                 ###########################################################
                 # CODE
                 ###########################################################
-                comments = '' if enable_comments else 'no-comment'
-                _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
+                _html.append('''\t<td class="%(cc)s">''' % {
                     'cc': code_class,
-                    'inc': comments
                 })
                 _html.append('''\n\t\t<div class="add-bubble"><div>&nbsp;</div></div><pre>%(code)s</pre>\n''' % {
                     'code': change['line']
@@ -168,21 +156,20 @@
 
 def wrap_to_table(html):
     """Given a string with html, return it wrapped in a table, similar to what
-    DiffProcessor returns."""
+    as_html returns."""
     return '''\
               <table class="code-difftable">
-                <tr class="line no-comment">
+                <tr class="line">
                 <td class="lineno new"></td>
-                <td class="code no-comment"><pre>%s</pre></td>
+                <td class="code"><pre>%s</pre></td>
                 </tr>
               </table>''' % html
 
 
-def wrapped_diff(filenode_old, filenode_new, diff_limit=None,
-                ignore_whitespace=True, line_context=3,
-                enable_comments=False):
+def html_diff(filenode_old, filenode_new, diff_limit=None,
+                ignore_whitespace=True, line_context=3):
     """
-    Returns a file diff wrapped into a table.
+    Returns a file diff as HTML wrapped into a table.
     Checks for diff_limit and presents a message if the diff is too big.
     """
     if filenode_old is None:
@@ -201,13 +188,13 @@
         raw_diff = get_gitdiff(filenode_old, filenode_new,
                                 ignore_whitespace=ignore_whitespace,
                                 context=line_context)
-        diff_processor = DiffProcessor(raw_diff)
+        diff_processor = DiffProcessor(raw_diff, html=True)
         if diff_processor.parsed: # there should be exactly one element, for the specified file
             f = diff_processor.parsed[0]
             op = f['operation']
             a_path = f['old_filename']
 
-        html_diff = as_html(parsed_lines=diff_processor.parsed, enable_comments=enable_comments)
+        html_diff = as_html(parsed_lines=diff_processor.parsed)
         stats = diff_processor.stat()
 
     else:
@@ -218,7 +205,7 @@
     if not html_diff:
         submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
         if submodules:
-            html_diff = wrap_to_table(h.escape('Submodule %r' % submodules[0]))
+            html_diff = wrap_to_table(webutils.escape('Submodule %r' % submodules[0]))
         else:
             html_diff = wrap_to_table(_('No changes detected'))
 
@@ -260,7 +247,7 @@
         return scm_instance.get_diff(rev1, rev2, path=path,
                                      ignore_whitespace=ignore_whitespace, context=context)
     except MemoryError:
-        h.flash('MemoryError: Diff is too big', category='error')
+        webutils.flash('MemoryError: Diff is too big', category='error')
         return b''
 
 
@@ -277,11 +264,11 @@
     """
     Give it a unified or git diff and it returns a list of the files that were
     mentioned in the diff together with a dict of meta information that
-    can be used to render it in a HTML template.
+    can be used to render it in a HTML template or as text.
     """
     _diff_git_re = re.compile(b'^diff --git', re.MULTILINE)
 
-    def __init__(self, diff, vcs='hg', diff_limit=None, inline_diff=True):
+    def __init__(self, diff, vcs='hg', diff_limit=None, html=True):
         """
         :param diff:   a text in diff format
         :param vcs: type of version control hg or git
@@ -298,9 +285,9 @@
         self.diff_limit = diff_limit
         self.limited_diff = False
         self.vcs = vcs
-        self.parsed = self._parse_gitdiff(inline_diff=inline_diff)
+        self.parsed = self._parse_gitdiff(html=html)
 
-    def _parse_gitdiff(self, inline_diff):
+    def _parse_gitdiff(self, html):
         """Parse self._diff and return a list of dicts with meta info and chunks for each file.
         Might set limited_diff.
         Optionally, do an extra pass and add extra markup of one-liner changes.
@@ -392,13 +379,27 @@
                 # not with the current UI
                 chunks = []
 
-            chunks.insert(0, [{
-                'old_lineno': '',
-                'new_lineno': '',
-                'action':     'context',
-                'line':       msg,
-                } for _op, msg in stats['ops'].items()
-                  if _op not in [MOD_FILENODE]])
+            # show helpful additional messages for mode changes and renames, but not for a plain 'modified file'
+            msgs = [
+                {
+                    'old_lineno': '',
+                    'new_lineno': '',
+                    'action': 'context',
+                    'line': msg,
+                }
+                for op_, msg in stats['ops'].items()
+                if op_ != MOD_FILENODE
+            ]
+            if msgs:
+                chunks.insert(0, msgs)
+
+            # enumerate 'context' lines that don't have new/old line numbers so they can be commented on
+            context_lineno = 0
+            for chunk in chunks:
+                for change in chunk:
+                    if not change['old_lineno'] and not change['new_lineno']:
+                        change['context_lineno'] = context_lineno
+                        context_lineno += 1
 
             _files.append({
                 'old_filename':     head['a_path'],
@@ -410,12 +411,14 @@
                 'stats':            stats,
             })
 
-        if not inline_diff:
+        if not html:
             return _files
 
-        # highlight inline changes when one del is followed by one add
         for diff_data in _files:
             for chunk in diff_data['chunks']:
+                for change in chunk:
+                    change['line'] = _escaper(change['line'])
+                # highlight inline changes when one del is followed by one add
                 lineiter = iter(chunk)
                 try:
                     peekline = next(lineiter)
@@ -453,12 +456,49 @@
         return self.adds, self.removes
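For orientation, a sketch (not part of the changeset) of walking the parsed structure produced by the class above; raw_diff is a placeholder for any parseable diff and vcs must match its flavour:

    processor = DiffProcessor(raw_diff, vcs='git', html=False)   # plain parsed data, no HTML escaping
    for f in processor.parsed:
        print(f['operation'], f['old_filename'])
        for chunk in f['chunks']:
            for change in chunk:
                if not change['old_lineno'] and not change['new_lineno']:
                    # '@@' hints and file messages, now addressable via 'context_lineno'
                    print('  ctx#%s %s' % (change['context_lineno'], change['line']))
                else:
                    print('  %s %s|%s %s' % (change['action'], change['old_lineno'],
                                             change['new_lineno'], change['line']))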
 
 
-_escape_re = re.compile(r'(&)|(<)|(>)|(\t)(\n|$)?|(\r)|(?<=.)( \n| $)')
+_escape_re = re.compile(r'(&)|(<)|(>)|(\t)($)?|(\r)|( $)')
 
 
-def _escaper(string):
-    """
-    Do HTML escaping/markup
+def _escaper(diff_line):
+    r"""
+    Do HTML escaping/markup of a single diff line (excluding first +/- column)
+
+    >>> _escaper('foobar')
+    'foobar'
+    >>> _escaper('@foo & bar')
+    '@foo &amp; bar'
+    >>> _escaper('foo < bar')
+    'foo &lt; bar'
+    >>> _escaper('foo > bar')
+    'foo &gt; bar'
+    >>> _escaper('<foo>')
+    '&lt;foo&gt;'
+    >>> _escaper('foo\tbar')
+    'foo<u>\t</u>bar'
+    >>> _escaper('foo\rbar\r')
+    'foo<u class="cr"></u>bar<u class="cr"></u>'
+    >>> _escaper('foo\t')
+    'foo<u>\t</u><i></i>'
+    >>> _escaper('foo ')
+    'foo <i></i>'
+    >>> _escaper('foo  ')
+    'foo  <i></i>'
+    >>> _escaper('')
+    ''
+    >>> _escaper(' ')
+    ' <i></i>'
+    >>> _escaper('\t')
+    '<u>\t</u><i></i>'
+    >>> _escaper('\t  ')
+    '<u>\t</u>  <i></i>'
+    >>> _escaper('  \t')
+    '  <u>\t</u><i></i>'
+    >>> _escaper('\t\t  ')
+    '<u>\t</u><u>\t</u>  <i></i>'
+    >>> _escaper('  \t\t')
+    '  <u>\t</u><u>\t</u><i></i>'
+    >>> _escaper('foo&bar<baz>  ')
+    'foo&amp;bar&lt;baz&gt;  <i></i>'
     """
 
     def substitute(m):
@@ -479,11 +519,11 @@
             return ' <i></i>'
         assert False
 
-    return _escape_re.sub(substitute, safe_str(string))
+    return _escape_re.sub(substitute, diff_line)
 
 
 _git_header_re = re.compile(br"""
-    ^diff[ ]--git[ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
+    ^diff[ ]--git[ ](?P<a_path_quote>"?)a/(?P<a_path>.+?)(?P=a_path_quote)[ ](?P<b_path_quote>"?)b/(?P<b_path>.+?)(?P=a_path_quote)\n
     (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
        ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
     (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
@@ -494,8 +534,8 @@
     (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
         \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
     (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
-    (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
-    (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
+    (?:^---[ ](?P<a_file_quote>"?)(a/(?P<a_file>.+?)(?P=a_file_quote)|/dev/null)\t?(?:\n|$))?
+    (?:^\+\+\+[ ](?P<b_file_quote>"?)(b/(?P<b_file>.+?)(?P=b_file_quote)|/dev/null)\t?(?:\n|$))?
 """, re.VERBOSE | re.MULTILINE)
 
 
@@ -521,6 +561,19 @@
 _header_next_check = re.compile(br'''(?!@)(?!literal )(?!delta )''')
 
 
+_git_bs_escape_re = re.compile(r'\\(?:([^0-9])|([0-9]{3}))')
+
+
+_git_bs_escape_dict = {'\\': '\\', '"': '"', 'r': '\r', 'n': '\n', 't': '\t'}
+
+
+def _git_bs_unescape_m(m):
+    c = m.group(1)
+    if c is not None:
+        return _git_bs_escape_dict.get(c) or ('\\' + c)
+    return chr(int(m.group(2), 8))
+
+
 def _get_header(vcs, diff_chunk):
     """
     Parses a Git diff for a single file (header and chunks) and returns a tuple with:
@@ -539,12 +592,21 @@
     elif vcs == 'hg':
         match = _hg_header_re.match(diff_chunk)
     if match is None:
-        raise Exception('diff not recognized as valid %s diff' % vcs)
+        raise Exception('diff not recognized as valid %s diff: %r' % (vcs, safe_str(bytes(diff_chunk[:1000]))))
     meta_info = {k: None if v is None else safe_str(v) for k, v in match.groupdict().items()}
+    if vcs == 'git':
+        for k in ['a_path', 'b_path', 'a_file', 'b_file']:
+            v = meta_info.get(k)
+            if v:
+                meta_info[k] = _git_bs_escape_re.sub(_git_bs_unescape_m, v)
     rest = diff_chunk[match.end():]
-    if rest and _header_next_check.match(rest):
-        raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, safe_str(bytes(diff_chunk[:match.end()])), safe_str(bytes(rest[:1000]))))
-    diff_lines = (_escaper(m.group(0)) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
+    if rest:
+        if _header_next_check.match(rest):
+            raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, safe_str(bytes(diff_chunk[:match.end()])), safe_str(bytes(rest[:1000]))))
+        if rest[-1:] != b'\n':
+            # The diff will generally already have trailing \n (and be a memoryview). It might also be huge so we don't want to allocate it twice. But in this very rare case, we don't care.
+            rest = bytes(rest) + b'\n'
+    diff_lines = (safe_str(m.group(1)) for m in re.finditer(br'(.*)\n', rest))
     return meta_info, diff_lines
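An illustration (not part of the changeset) of the new unquoting of Git's C-style quoted path names, which the extended header regex captures and _get_header now feeds through _git_bs_unescape_m; the sample path is made up:

    quoted = r'sp ace\tand\042quotes\042'                     # as it would appear inside "a/..." quoting
    print(_git_bs_escape_re.sub(_git_bs_unescape_m, quoted))  # -> 'sp ace<TAB>and"quotes"' with a real tab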
 
 
@@ -579,19 +641,17 @@
             old_line -= 1
             new_line -= 1
 
-            context = len(gr) == 5
             old_end += old_line
             new_end += new_line
 
-            if context:
-                # skip context only if it's first line
-                if int(gr[0]) > 1:
-                    lines.append({
-                        'old_lineno': '...',
-                        'new_lineno': '...',
-                        'action':     'context',
-                        'line':       line,
-                    })
+            # include the '@@' line if it gives a line number hint or separates chunks - not if the chunk starts at the start of the file, like '@@ -1,7 +1,7 @@'
+            if int(gr[0]) > 1:
+                lines.append({
+                    'old_lineno': '',
+                    'new_lineno': '',
+                    'action':     'context',
+                    'line':       line,
+                })
 
             line = next(diff_lines)
 
@@ -616,15 +676,14 @@
                 else:
                     raise Exception('error parsing diff - unknown command in line %r at -%s+%s' % (line, old_line, new_line))
 
-                if not _newline_marker.match(line):
-                    old_line += affects_old
-                    new_line += affects_new
-                    lines.append({
-                        'old_lineno':   affects_old and old_line or '',
-                        'new_lineno':   affects_new and new_line or '',
-                        'action':       action,
-                        'line':         line[1:],
-                    })
+                old_line += affects_old
+                new_line += affects_new
+                lines.append({
+                    'old_lineno':   affects_old and old_line or '',
+                    'new_lineno':   affects_new and new_line or '',
+                    'action':       action,
+                    'line':         line[1:],
+                })
 
                 line = next(diff_lines)
 
@@ -632,8 +691,8 @@
                     # we need to append to lines, since this is not
                     # counted in the line specs of diff
                     lines.append({
-                        'old_lineno':   '...',
-                        'new_lineno':   '...',
+                        'old_lineno':   '',
+                        'new_lineno':   '',
                         'action':       'context',
                         'line':         line,
                     })
--- a/kallithea/lib/exceptions.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/exceptions.py	Thu May 27 21:27:37 2021 +0200
@@ -74,8 +74,5 @@
     pass
 
 
-class HgsubversionImportError(Exception):
-    pass
-
 class InvalidCloneUriException(Exception):
     pass
--- a/kallithea/lib/helpers.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/helpers.py	Thu May 27 21:27:37 2021 +0200
@@ -18,9 +18,7 @@
 available to Controllers. This module is available to both as 'h'.
 """
 import hashlib
-import json
 import logging
-import random
 import re
 import textwrap
 import urllib.parse
@@ -28,223 +26,135 @@
 from beaker.cache import cache_region
 from pygments import highlight as code_highlight
 from pygments.formatters.html import HtmlFormatter
+from tg import tmpl_context as c
 from tg.i18n import ugettext as _
-from webhelpers2.html import HTML, escape, literal
-from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
-from webhelpers2.html.tags import form as insecure_form
-from webhelpers2.html.tags import hidden, link_to, password, radio
-from webhelpers2.html.tags import select as webhelpers2_select
-from webhelpers2.html.tags import submit, text, textarea
-from webhelpers2.number import format_byte_size
-from webhelpers2.text import chop_at, truncate, wrap_paragraphs
 
-from kallithea.config.routing import url
+import kallithea
 from kallithea.lib.annotate import annotate_highlight
-#==============================================================================
-# PERMS
-#==============================================================================
 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
-from kallithea.lib.markup_renderer import url_re
+from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE
 from kallithea.lib.pygmentsutils import get_custom_lexer
-from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
-from kallithea.lib.utils2 import age as _age
-from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
+from kallithea.lib.utils2 import AttributeDict, asbool, credentials_filter, link_to_ref, safe_bytes, safe_int, safe_str, time_to_datetime
 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
-#==============================================================================
-# SCM FILTERS available via h.
-#==============================================================================
 from kallithea.lib.vcs.utils import author_email, author_name
+from kallithea.lib.webutils import (HTML, Option, age, canonical_url, checkbox, chop_at, end_form, escape, fmt_date, form, format_byte_size, hidden, js, jshtml,
+                                    link_to, literal, password, pop_flash_messages, radio, render_w_mentions, reset, safeid, select, session_csrf_secret_name,
+                                    session_csrf_secret_token, shorter, submit, text, textarea, url, urlify_text, wrap_paragraphs)
+from kallithea.model import db
+from kallithea.model.changeset_status import ChangesetStatusModel
 
 
 # mute pyflakes "imported but unused"
+# from webutils
 assert Option
+assert age
+assert canonical_url
 assert checkbox
+assert chop_at
 assert end_form
+assert fmt_date
+assert form
+assert format_byte_size
+assert hidden
+assert js
+assert jshtml
 assert password
+assert pop_flash_messages
 assert radio
+assert render_w_mentions
+assert reset
+assert safeid
+assert select
+assert session_csrf_secret_name
+assert session_csrf_secret_token
+assert shorter
 assert submit
 assert text
 assert textarea
-assert format_byte_size
-assert chop_at
+assert urlify_text
 assert wrap_paragraphs
+# from kallithea.lib.auth
 assert HasPermissionAny
 assert HasRepoGroupPermissionLevel
 assert HasRepoPermissionLevel
+# from utils2
+assert credentials_filter
+assert link_to_ref
 assert time_to_datetime
+# from vcs
 assert EmptyChangeset
 
 
 log = logging.getLogger(__name__)
 
 
-def canonical_url(*args, **kargs):
-    '''Like url(x, qualified=True), but returns url that not only is qualified
-    but also canonical, as configured in canonical_url'''
-    from kallithea import CONFIG
-    try:
-        parts = CONFIG.get('canonical_url', '').split('://', 1)
-        kargs['host'] = parts[1]
-        kargs['protocol'] = parts[0]
-    except IndexError:
-        kargs['qualified'] = True
-    return url(*args, **kargs)
-
-
-def canonical_hostname():
-    '''Return canonical hostname of system'''
-    from kallithea import CONFIG
-    try:
-        parts = CONFIG.get('canonical_url', '').split('://', 1)
-        return parts[1].split('/', 1)[0]
-    except IndexError:
-        parts = url('home', qualified=True).split('://', 1)
-        return parts[1].split('/', 1)[0]
-
-
-def html_escape(s):
-    """Return string with all html escaped.
-    This is also safe for javascript in html but not necessarily correct.
-    """
-    return (s
-        .replace('&', '&amp;')
-        .replace(">", "&gt;")
-        .replace("<", "&lt;")
-        .replace('"', "&quot;")
-        .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
-        )
-
-def js(value):
-    """Convert Python value to the corresponding JavaScript representation.
-
-    This is necessary to safely insert arbitrary values into HTML <script>
-    sections e.g. using Mako template expression substitution.
-
-    Note: Rather than using this function, it's preferable to avoid the
-    insertion of values into HTML <script> sections altogether. Instead,
-    data should (to the extent possible) be passed to JavaScript using
-    data attributes or AJAX calls, eliminating the need for JS specific
-    escaping.
-
-    Note: This is not safe for use in attributes (e.g. onclick), because
-    quotes are not escaped.
-
-    Because the rules for parsing <script> varies between XHTML (where
-    normal rules apply for any special characters) and HTML (where
-    entities are not interpreted, but the literal string "</script>"
-    is forbidden), the function ensures that the result never contains
-    '&', '<' and '>', thus making it safe in both those contexts (but
-    not in attributes).
-    """
-    return literal(
-        ('(' + json.dumps(value) + ')')
-        # In JSON, the following can only appear in string literals.
-        .replace('&', r'\x26')
-        .replace('<', r'\x3c')
-        .replace('>', r'\x3e')
-    )
-
-
-def jshtml(val):
-    """HTML escapes a string value, then converts the resulting string
-    to its corresponding JavaScript representation (see `js`).
-
-    This is used when a plain-text string (possibly containing special
-    HTML characters) will be used by a script in an HTML context (e.g.
-    element.innerHTML or jQuery's 'html' method).
-
-    If in doubt, err on the side of using `jshtml` over `js`, since it's
-    better to escape too much than too little.
-    """
-    return js(escape(val))
-
-
-def shorter(s, size=20, firstline=False, postfix='...'):
-    """Truncate s to size, including the postfix string if truncating.
-    If firstline, truncate at newline.
-    """
-    if firstline:
-        s = s.split('\n', 1)[0].rstrip()
-    if len(s) > size:
-        return s[:size - len(postfix)] + postfix
-    return s
-
-
-def reset(name, value, id=NotGiven, **attrs):
-    """Create a reset button, similar to webhelpers2.html.tags.submit ."""
-    return _input("reset", name, value, id, attrs)
-
-
-def select(name, selected_values, options, id=NotGiven, **attrs):
-    """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
-    if isinstance(options, list):
-        option_list = options
-        # Handle old value,label lists ... where value also can be value,label lists
-        options = Options()
-        for x in option_list:
-            if isinstance(x, tuple) and len(x) == 2:
-                value, label = x
-            elif isinstance(x, str):
-                value = label = x
-            else:
-                log.error('invalid select option %r', x)
-                raise
-            if isinstance(value, list):
-                og = options.add_optgroup(label)
-                for x in value:
-                    if isinstance(x, tuple) and len(x) == 2:
-                        group_value, group_label = x
-                    elif isinstance(x, str):
-                        group_value = group_label = x
-                    else:
-                        log.error('invalid select option %r', x)
-                        raise
-                    og.add_option(group_label, group_value)
-            else:
-                options.add_option(label, value)
-    return webhelpers2_select(name, selected_values, options, id=id, **attrs)
-
-
-safeid = _make_safe_id_component
-
-
 def FID(raw_id, path):
     """
     Creates a unique ID for a filenode, based on its revision and a hash of its path;
     it's safe to use in URLs
-
-    :param raw_id:
-    :param path:
     """
-
     return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
 
 
-class _FilesBreadCrumbs(object):
+def get_ignore_whitespace_diff(GET):
+    """Return true if URL requested whitespace to be ignored"""
+    return bool(GET.get('ignorews'))
+
 
-    def __call__(self, repo_name, rev, paths):
-        url_l = [link_to(repo_name, url('files_home',
-                                        repo_name=repo_name,
-                                        revision=rev, f_path=''),
-                         class_='ypjax-link')]
-        paths_l = paths.split('/')
-        for cnt, p in enumerate(paths_l):
-            if p != '':
-                url_l.append(link_to(p,
-                                     url('files_home',
-                                         repo_name=repo_name,
-                                         revision=rev,
-                                         f_path='/'.join(paths_l[:cnt + 1])
-                                         ),
-                                     class_='ypjax-link'
-                                     )
-                             )
+def ignore_whitespace_link(GET, anchor=None):
+    """Return snippet with link to current URL with whitespace ignoring toggled"""
+    params = dict(GET)  # ignoring duplicates
+    if get_ignore_whitespace_diff(GET):
+        params.pop('ignorews')
+        title = _("Show whitespace changes")
+    else:
+        params['ignorews'] = '1'
+        title = _("Ignore whitespace changes")
+    params['anchor'] = anchor
+    return link_to(
+        literal('<i class="icon-strike"></i>'),
+        url.current(**params),
+        title=title,
+        **{'data-toggle': 'tooltip'})
 
-        return literal('/'.join(url_l))
+
+def get_diff_context_size(GET):
+    """Return effective context size requested in URL"""
+    return safe_int(GET.get('context'), default=3)
 
 
-files_breadcrumbs = _FilesBreadCrumbs()
+def increase_context_link(GET, anchor=None):
+    """Return snippet with link to current URL with double context size"""
+    context = get_diff_context_size(GET) * 2
+    params = dict(GET)  # ignoring duplicates
+    params['context'] = str(context)
+    params['anchor'] = anchor
+    return link_to(
+        literal('<i class="icon-sort"></i>'),
+        url.current(**params),
+        title=_('Increase diff context to %(num)s lines') % {'num': context},
+        **{'data-toggle': 'tooltip'})
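A small sketch (not part of the changeset) of the new GET helpers above; they only need a dict-like object, so a plain dict stands in here for the request's GET parameters:

    GET = {'context': '6', 'ignorews': '1'}   # hypothetical query parameters
    get_diff_context_size(GET)                # -> 6
    get_ignore_whitespace_diff(GET)           # -> True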
+
+
+def files_breadcrumbs(repo_name, rev, paths):
+    url_l = [link_to(repo_name, url('files_home',
+                                    repo_name=repo_name,
+                                    revision=rev, f_path=''),
+                     class_='ypjax-link')]
+    paths_l = paths.split('/')
+    for cnt, p in enumerate(paths_l):
+        if p != '':
+            url_l.append(link_to(p,
+                                 url('files_home',
+                                     repo_name=repo_name,
+                                     revision=rev,
+                                     f_path='/'.join(paths_l[:cnt + 1])
+                                     ),
+                                 class_='ypjax-link'
+                                 )
+                         )
+    return literal('/'.join(url_l))
 
 
 class CodeHtmlFormatter(HtmlFormatter):
@@ -431,91 +341,12 @@
     return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
 
 
-class _Message(object):
-    """A message returned by ``pop_flash_messages()``.
-
-    Converting the message to a string returns the message text. Instances
-    also have the following attributes:
-
-    * ``category``: the category specified when the message was created.
-    * ``message``: the html-safe message text.
-    """
-
-    def __init__(self, category, message):
-        self.category = category
-        self.message = message
-
-
-def _session_flash_messages(append=None, clear=False):
-    """Manage a message queue in tg.session: return the current message queue
-    after appending the given message, and possibly clearing the queue."""
-    key = 'flash'
-    from tg import session
-    if key in session:
-        flash_messages = session[key]
-    else:
-        if append is None:  # common fast path - also used for clearing empty queue
-            return []  # don't bother saving
-        flash_messages = []
-        session[key] = flash_messages
-    if append is not None and append not in flash_messages:
-        flash_messages.append(append)
-    if clear:
-        session.pop(key, None)
-    session.save()
-    return flash_messages
-
-
-def flash(message, category, logf=None):
-    """
-    Show a message to the user _and_ log it through the specified function
-
-    category: notice (default), warning, error, success
-    logf: a custom log function - such as log.debug
-
-    logf defaults to log.info, unless category equals 'success', in which
-    case logf defaults to log.debug.
-    """
-    assert category in ('error', 'success', 'warning'), category
-    if hasattr(message, '__html__'):
-        # render to HTML for storing in cookie
-        safe_message = str(message)
-    else:
-        # Apply str - the message might be an exception with __str__
-        # Escape, so we can trust the result without further escaping, without any risk of injection
-        safe_message = html_escape(str(message))
-    if logf is None:
-        logf = log.info
-        if category == 'success':
-            logf = log.debug
-
-    logf('Flash %s: %s', category, safe_message)
-
-    _session_flash_messages(append=(category, safe_message))
-
-
-def pop_flash_messages():
-    """Return all accumulated messages and delete them from the session.
-
-    The return value is a list of ``Message`` objects.
-    """
-    return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
-
-
-def age(x, y=False):
-    return _age(x, y)
-
 def capitalize(x):
     return x.capitalize()
 
-email = author_email
-
 def short_id(x):
     return x[:12]
 
-def hide_credentials(x):
-    return ''.join(credentials_filter(x))
-
 
 def show_id(cs):
     """
@@ -524,9 +355,8 @@
 
     :param cs: changeset instance
     """
-    from kallithea import CONFIG
-    def_len = safe_int(CONFIG.get('show_sha_length', 12))
-    show_rev = str2bool(CONFIG.get('show_revision_number', False))
+    def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12))
+    show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False))
 
     raw_id = cs.raw_id[:def_len]
     if show_rev:
@@ -535,40 +365,13 @@
         return raw_id
 
 
-def fmt_date(date):
-    if date:
-        return date.strftime("%Y-%m-%d %H:%M:%S")
-    return ""
-
-
-def is_git(repository):
-    if hasattr(repository, 'alias'):
-        _type = repository.alias
-    elif hasattr(repository, 'repo_type'):
-        _type = repository.repo_type
-    else:
-        _type = repository
-    return _type == 'git'
-
-
-def is_hg(repository):
-    if hasattr(repository, 'alias'):
-        _type = repository.alias
-    elif hasattr(repository, 'repo_type'):
-        _type = repository.repo_type
-    else:
-        _type = repository
-    return _type == 'hg'
-
-
 @cache_region('long_term', 'user_attr_or_none')
 def user_attr_or_none(author, show_attr):
     """Try to match email part of VCS committer string with a local user and return show_attr
     - or return None if user not found"""
     email = author_email(author)
     if email:
-        from kallithea.model.db import User
-        user = User.get_by_email(email)
+        user = db.User.get_by_email(email)
         if user is not None:
             return getattr(user, show_attr)
     return None
@@ -593,27 +396,21 @@
 
 
 def person(author, show_attr="username"):
-    """Find the user identified by 'author', return one of the users attributes,
+    """Find the user identified by 'author' string, return one of the users attributes,
     default to the username attribute, None if there is no user"""
-    from kallithea.model.db import User
-    # if author is already an instance use it for extraction
-    if isinstance(author, User):
-        return getattr(author, show_attr)
-
     value = user_attr_or_none(author, show_attr)
     if value is not None:
         return value
 
     # Still nothing?  Just pass back the author name if any, else the email
-    return author_name(author) or email(author)
+    return author_name(author) or author_email(author)
 
 
 def person_by_id(id_, show_attr="username"):
-    from kallithea.model.db import User
     # maybe it's an ID ?
     if str(id_).isdigit() or isinstance(id_, int):
         id_ = int(id_)
-        user = User.get(id_)
+        user = db.User.get(id_)
         if user is not None:
             return getattr(user, show_attr)
     return id_
@@ -791,9 +588,8 @@
         return group_name
 
     def get_pull_request():
-        from kallithea.model.db import PullRequest
         pull_request_id = action_params
-        nice_id = PullRequest.make_nice_id(pull_request_id)
+        nice_id = db.PullRequest.make_nice_id(pull_request_id)
 
         deleted = user_log.repository is None
         if deleted:
@@ -894,7 +690,6 @@
     and '_' changed to '-' and be used as attributes on the div. The default
     class is 'gravatar'.
     """
-    from tg import tmpl_context as c
     if not c.visual.use_gravatar:
         return ''
     if 'div_class' not in div_attributes:
@@ -916,7 +711,6 @@
     empty then we fallback to using an icon.
 
     """
-    from tg import tmpl_context as c
     if not c.visual.use_gravatar:
         return ''
 
@@ -932,16 +726,12 @@
     else:
         # if src is empty then there was no gravatar, so we use a font icon
         html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
-            .format(cls=cls, size=size, src=src))
+            .format(cls=cls, size=size))
 
     return literal(html)
 
 
 def gravatar_url(email_address, size=30, default=''):
-    # doh, we need to re-import those to mock it later
-    from kallithea.config.routing import url
-    from kallithea.model.db import User
-    from tg import tmpl_context as c
     if not c.visual.use_gravatar:
         return ""
 
@@ -952,13 +742,12 @@
         return default
 
     parsed_url = urllib.parse.urlparse(url.current(qualified=True))
-    url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
+    return (c.visual.gravatar_url or db.User.DEFAULT_GRAVATAR_URL) \
                .replace('{email}', email_address) \
                .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
                .replace('{netloc}', parsed_url.netloc) \
                .replace('{scheme}', parsed_url.scheme) \
                .replace('{size}', str(size))
-    return url
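An illustration (not part of the changeset) of how the template substitution above expands; the template and address are hypothetical stand-ins for c.visual.gravatar_url and a user email:

    import hashlib
    template = 'https://avatars.example.com/{md5email}?s={size}'
    email_address = 'jane@example.com'
    print(template
          .replace('{md5email}', hashlib.md5(email_address.lower().encode('utf-8')).hexdigest())
          .replace('{size}', str(48)))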
 
 
 def changed_tooltip(nodes):
@@ -986,8 +775,6 @@
 
     :param stats: two element list of added/deleted lines of code
     """
-    from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
-        MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
 
     a, d = stats['added'], stats['deleted']
     width = 100
@@ -1050,253 +837,16 @@
     return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
 
 
-_URLIFY_RE = re.compile(r'''
-# URL markup
-(?P<url>%s) |
-# @mention markup
-(?P<mention>%s) |
-# Changeset hash markup
-(?<!\w|[-_])
-  (?P<hash>[0-9a-f]{12,40})
-(?!\w|[-_]) |
-# Markup of *bold text*
-(?:
-  (?:^|(?<=\s))
-  (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
-  (?![*\w])
-) |
-# "Stylize" markup
-\[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
-\[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
-\[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
-\[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
-\[(?P<tag>[a-z]+)\]
-''' % (url_re.pattern, MENTIONS_REGEX.pattern),
-    re.VERBOSE | re.MULTILINE | re.IGNORECASE)
-
-
-def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
-    """
-    Parses given text message and make literal html with markup.
-    The text will be truncated to the specified length.
-    Hashes are turned into changeset links to specified repository.
-    URLs links to what they say.
-    Issues are linked to given issue-server.
-    If link_ is provided, all text not already linking somewhere will link there.
-    >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
-    literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
-    """
-
-    def _replace(match_obj):
-        url = match_obj.group('url')
-        if url is not None:
-            return '<a href="%(url)s">%(url)s</a>' % {'url': url}
-        mention = match_obj.group('mention')
-        if mention is not None:
-            return '<b>%s</b>' % mention
-        hash_ = match_obj.group('hash')
-        if hash_ is not None and repo_name is not None:
-            from kallithea.config.routing import url  # doh, we need to re-import url to mock it later
-            return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
-                 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
-                 'hash': hash_,
-                }
-        bold = match_obj.group('bold')
-        if bold is not None:
-            return '<b>*%s*</b>' % _urlify(bold[1:-1])
-        if stylize:
-            seen = match_obj.group('seen')
-            if seen:
-                return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
-            license = match_obj.group('license')
-            if license:
-                return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
-            tagtype = match_obj.group('tagtype')
-            if tagtype:
-                tagvalue = match_obj.group('tagvalue')
-                return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
-            lang = match_obj.group('lang')
-            if lang:
-                return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
-            tag = match_obj.group('tag')
-            if tag:
-                return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
-        return match_obj.group(0)
-
-    def _urlify(s):
-        """
-        Extract urls from text and make html links out of them
-        """
-        return _URLIFY_RE.sub(_replace, s)
-
-    if truncate is None:
-        s = s.rstrip()
-    else:
-        s = truncatef(s, truncate, whole_word=True)
-    s = html_escape(s)
-    s = _urlify(s)
-    if repo_name is not None:
-        s = urlify_issues(s, repo_name)
-    if link_ is not None:
-        # make href around everything that isn't a href already
-        s = linkify_others(s, link_)
-    s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
-    # Turn HTML5 into more valid HTML4 as required by some mail readers.
-    # (This is not done in one step in html_escape, because character codes like
-    # &#123; risk to be seen as an issue reference due to the presence of '#'.)
-    s = s.replace("&apos;", "&#39;")
-    return literal(s)
-
-
-def linkify_others(t, l):
-    """Add a default link to html with links.
-    HTML doesn't allow nesting of links, so the outer link must be broken up
-    in pieces and give space for other links.
-    """
-    urls = re.compile(r'(\<a.*?\<\/a\>)',)
-    links = []
-    for e in urls.split(t):
-        if e.strip() and not urls.match(e):
-            links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
-        else:
-            links.append(e)
-
-    return ''.join(links)
-
-
-# Global variable that will hold the actual urlify_issues function body.
-# Will be set on first use when the global configuration has been read.
-_urlify_issues_f = None
-
-
-def urlify_issues(newtext, repo_name):
-    """Urlify issue references according to .ini configuration"""
-    global _urlify_issues_f
-    if _urlify_issues_f is None:
-        from kallithea import CONFIG
-        from kallithea.model.db import URL_SEP
-        assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
-
-        # Build chain of urlify functions, starting with not doing any transformation
-        def tmp_urlify_issues_f(s):
-            return s
-
-        issue_pat_re = re.compile(r'issue_pat(.*)')
-        for k in CONFIG:
-            # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
-            m = issue_pat_re.match(k)
-            if m is None:
-                continue
-            suffix = m.group(1)
-            issue_pat = CONFIG.get(k)
-            issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
-            issue_sub = CONFIG.get('issue_sub%s' % suffix)
-            issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
-            if issue_prefix:
-                log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
-            if not issue_pat:
-                log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
-                continue
-            if not issue_server_link:
-                log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
-                continue
-            if issue_sub is None: # issue_sub can be empty but should be present
-                log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
-                continue
-
-            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound
-            try:
-                issue_re = re.compile(issue_pat)
-            except re.error as e:
-                log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e))
-                continue
-
-            log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub)
-
-            def issues_replace(match_obj,
-                               issue_server_link=issue_server_link, issue_sub=issue_sub):
-                try:
-                    issue_url = match_obj.expand(issue_server_link)
-                except (IndexError, re.error) as e:
-                    log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
-                    issue_url = issue_server_link
-                issue_url = issue_url.replace('{repo}', repo_name)
-                issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
-                # if issue_sub is empty use the matched issue reference verbatim
-                if not issue_sub:
-                    issue_text = match_obj.group()
-                else:
-                    try:
-                        issue_text = match_obj.expand(issue_sub)
-                    except (IndexError, re.error) as e:
-                        log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
-                        issue_text = match_obj.group()
-
-                return (
-                    '<a class="issue-tracker-link" href="%(url)s">'
-                    '%(text)s'
-                    '</a>'
-                    ) % {
-                     'url': issue_url,
-                     'text': issue_text,
-                    }
-
-            def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
-                return issue_re.sub(issues_replace, chain_f(s))
-
-        # Set tmp function globally - atomically
-        _urlify_issues_f = tmp_urlify_issues_f
-
-    return _urlify_issues_f(newtext)
-
-
-def render_w_mentions(source, repo_name=None):
-    """
-    Render plain text with revision hashes and issue references urlified
-    and with @mention highlighting.
-    """
-    s = safe_str(source)
-    s = urlify_text(s, repo_name=repo_name)
-    return literal('<div class="formatted-fixed">%s</div>' % s)
-
-
-def short_ref(ref_type, ref_name):
-    if ref_type == 'rev':
-        return short_id(ref_name)
-    return ref_name
-
-
-def link_to_ref(repo_name, ref_type, ref_name, rev=None):
-    """
-    Return full markup for a href to changeset_home for a changeset.
-    If ref_type is branch it will link to changelog.
-    ref_name is shortened if ref_type is 'rev'.
-    if rev is specified show it too, explicitly linking to that revision.
-    """
-    txt = short_ref(ref_type, ref_name)
-    if ref_type == 'branch':
-        u = url('changelog_home', repo_name=repo_name, branch=ref_name)
-    else:
-        u = url('changeset_home', repo_name=repo_name, revision=ref_name)
-    l = link_to(repo_name + '#' + txt, u)
-    if rev and ref_type != 'rev':
-        l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
-    return l
-
-
 def changeset_status(repo, revision):
-    from kallithea.model.changeset_status import ChangesetStatusModel
     return ChangesetStatusModel().get_status(repo, revision)
 
 
 def changeset_status_lbl(changeset_status):
-    from kallithea.model.db import ChangesetStatus
-    return ChangesetStatus.get_status_lbl(changeset_status)
+    return db.ChangesetStatus.get_status_lbl(changeset_status)
 
 
 def get_permission_name(key):
-    from kallithea.model.db import Permission
-    return dict(Permission.PERMS).get(key)
+    return dict(db.Permission.PERMS).get(key)
 
 
 def journal_filter_help():
@@ -1319,35 +869,6 @@
     '''))
 
 
-def not_mapped_error(repo_name):
-    flash(_('%s repository is not mapped to db perhaps'
-            ' it was created or renamed from the filesystem'
-            ' please run the application again'
-            ' in order to rescan repositories') % repo_name, category='error')
-
-
 def ip_range(ip_addr):
-    from kallithea.model.db import UserIpMap
-    s, e = UserIpMap._get_ip_range(ip_addr)
+    s, e = db.UserIpMap._get_ip_range(ip_addr)
     return '%s - %s' % (s, e)
-
-
-session_csrf_secret_name = "_session_csrf_secret_token"
-
-def session_csrf_secret_token():
-    """Return (and create) the current session's CSRF protection token."""
-    from tg import session
-    if not session_csrf_secret_name in session:
-        session[session_csrf_secret_name] = str(random.getrandbits(128))
-        session.save()
-    return session[session_csrf_secret_name]
-
-def form(url, method="post", **attrs):
-    """Like webhelpers.html.tags.form , but automatically adding
-    session_csrf_secret_token for POST. The secret is thus never leaked in GET
-    URLs.
-    """
-    form = insecure_form(url, method, **attrs)
-    if method.lower() == 'get':
-        return form
-    return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
--- a/kallithea/lib/hooks.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/hooks.py	Thu May 27 21:27:37 2021 +0200
@@ -15,7 +15,8 @@
 kallithea.lib.hooks
 ~~~~~~~~~~~~~~~~~~~
 
-Hooks run by Kallithea
+Hooks run by Kallithea. They are generally named 'log_*', but they also perform
+important cache invalidation and run extension hooks.
 
 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
@@ -25,119 +26,49 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import os
-import sys
 import time
 
-import mercurial.scmutil
-
-from kallithea.lib import helpers as h
+import kallithea
 from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils import action_logger, make_ui
-from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-from kallithea.model.db import Repository, User
+from kallithea.lib.utils2 import get_hook_environment
+from kallithea.model import userlog
 
 
-def _get_scm_size(alias, root_path):
-    if not alias.startswith('.'):
-        alias += '.'
-
-    size_scm, size_root = 0, 0
-    for path, dirs, files in os.walk(root_path):
-        if path.find(alias) != -1:
-            for f in files:
-                try:
-                    size_scm += os.path.getsize(os.path.join(path, f))
-                except OSError:
-                    pass
-        else:
-            for f in files:
-                try:
-                    size_root += os.path.getsize(os.path.join(path, f))
-                except OSError:
-                    pass
-
-    size_scm_f = h.format_byte_size(size_scm)
-    size_root_f = h.format_byte_size(size_root)
-    size_total_f = h.format_byte_size(size_root + size_scm)
-
-    return size_scm_f, size_root_f, size_total_f
-
-
-def repo_size(ui, repo, hooktype=None, **kwargs):
-    """Show size of Mercurial repository.
-
-    Called as Mercurial hook changegroup.repo_size after push.
-    """
-    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', safe_str(repo.root))
-
-    last_cs = repo[len(repo) - 1]
-
-    msg = ('Repository size .hg: %s Checkout: %s Total: %s\n'
-           'Last revision is now r%s:%s\n') % (
-        size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12]
-    )
-    ui.status(safe_bytes(msg))
-
-
-def log_pull_action(ui, repo, **kwargs):
+def log_pull_action():
     """Logs user last pull action
 
-    Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git.
-
     Does *not* use the action from the hook environment but is always 'pull'.
     """
     ex = get_hook_environment()
 
-    user = User.get_by_username(ex.username)
     action = 'pull'
-    action_logger(user, action, ex.repository, ex.ip, commit=True)
+    userlog.action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
     # extension hook call
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'PULL_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(ex)
         callback(**kw)
 
-    return 0
-
-
-def log_push_action(ui, repo, node, node_last, **kwargs):
-    """
-    Register that changes have been added to the repo - log the action *and* invalidate caches.
-    Note: This hook is not only logging, but also the side effect invalidating
-    caches! The function should perhaps be renamed.
-
-    Called as Mercurial hook changegroup.kallithea_log_push_action .
-
-    The pushed changesets is given by the revset 'node:node_last'.
-    """
-    revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])]
-    process_pushed_raw_ids(revs)
-    return 0
-
 
 def process_pushed_raw_ids(revs):
     """
     Register that changes have been added to the repo - log the action *and* invalidate caches.
 
-    Called from Mercurial changegroup.kallithea_log_push_action calling hook log_push_action,
+    Called from Mercurial changegroup.kallithea_push_action calling hook push_action,
     or from the Git post-receive hook calling handle_git_post_receive ...
     or from scm _handle_push.
     """
     ex = get_hook_environment()
 
     action = '%s:%s' % (ex.action, ','.join(revs))
-    action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
+    userlog.action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
 
     from kallithea.model.scm import ScmModel
     ScmModel().mark_for_invalidation(ex.repository)
 
     # extension hook call
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'PUSH_HOOK', None)
     if callable(callback):
         kw = {'pushed_revs': revs}
         kw.update(ex)
@@ -167,22 +98,18 @@
      'repo_name'
 
     """
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'CREATE_REPO_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(repository_dict)
         kw.update({'created_by': created_by})
         kw.update(kwargs)
-        return callback(**kw)
-
-    return 0
+        callback(**kw)
 
 
 def check_allowed_create_user(user_dict, created_by, **kwargs):
     # pre create hooks
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
     if callable(callback):
         allowed, reason = callback(created_by=created_by, **user_dict)
         if not allowed:
@@ -217,14 +144,23 @@
      'emails',
 
     """
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'CREATE_USER_HOOK', None)
     if callable(callback):
-        return callback(created_by=created_by, **user_dict)
+        callback(created_by=created_by, **user_dict)
+
+
+def log_create_pullrequest(pullrequest_dict, created_by, **kwargs):
+    """
+    Post create pull request hook.
+
+    :param pullrequest_dict: dict dump of pull request object
+    """
+    callback = getattr(kallithea.EXTENSIONS, 'CREATE_PULLREQUEST_HOOK', None)
+    if callable(callback):
+        return callback(created_by=created_by, **pullrequest_dict)
 
     return 0
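A hedged sketch (not part of the changeset) of what a matching extension hook might look like, assuming site extensions expose hooks as plain module-level callables, as the getattr lookup above suggests; the body is purely illustrative:

    def CREATE_PULLREQUEST_HOOK(created_by, **pullrequest_dict):
        # pullrequest_dict is the dict dump of the pull request object (see docstring above)
        import logging
        logging.getLogger('extensions').info('pull request created by %s', created_by)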
 
-
 def log_delete_repository(repository_dict, deleted_by, **kwargs):
     """
     Post delete repository Hook.
@@ -248,17 +184,14 @@
      'repo_name'
 
     """
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'DELETE_REPO_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(repository_dict)
         kw.update({'deleted_by': deleted_by,
                    'deleted_on': time.time()})
         kw.update(kwargs)
-        return callback(**kw)
-
-    return 0
+        callback(**kw)
 
 
 def log_delete_user(user_dict, deleted_by, **kwargs):
@@ -289,116 +222,6 @@
      'emails',
 
     """
-    from kallithea import EXTENSIONS
-    callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None)
+    callback = getattr(kallithea.EXTENSIONS, 'DELETE_USER_HOOK', None)
     if callable(callback):
-        return callback(deleted_by=deleted_by, **user_dict)
-
-    return 0
-
-
-def _hook_environment(repo_path):
-    """
-    Create a light-weight environment for stand-alone scripts and return an UI and the
-    db repository.
-
-    Git hooks are executed as subprocess of Git while Kallithea is waiting, and
-    they thus need enough info to be able to create an app environment and
-    connect to the database.
-    """
-    import paste.deploy
-    import kallithea.config.middleware
-
-    extras = get_hook_environment()
-
-    path_to_ini_file = extras['config']
-    kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
-    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
-    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
-
-    # fix if it's not a bare repo
-    if repo_path.endswith(os.sep + '.git'):
-        repo_path = repo_path[:-5]
-
-    repo = Repository.get_by_full_path(repo_path)
-    if not repo:
-        raise OSError('Repository %s not found in database' % repo_path)
-
-    baseui = make_ui()
-    return baseui, repo
-
-
-def handle_git_pre_receive(repo_path, git_stdin_lines):
-    """Called from Git pre-receive hook"""
-    # Currently unused. TODO: remove?
-    return 0
-
-
-def handle_git_post_receive(repo_path, git_stdin_lines):
-    """Called from Git post-receive hook"""
-    try:
-        baseui, repo = _hook_environment(repo_path)
-    except HookEnvironmentError as e:
-        sys.stderr.write("Skipping Kallithea Git post-recieve hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e))
-        return 0
-
-    # the post push hook should never use the cached instance
-    scm_repo = repo.scm_instance_no_cache()
-
-    rev_data = []
-    for l in git_stdin_lines:
-        old_rev, new_rev, ref = l.strip().split(' ')
-        _ref_data = ref.split('/')
-        if _ref_data[1] in ['tags', 'heads']:
-            rev_data.append({'old_rev': old_rev,
-                             'new_rev': new_rev,
-                             'ref': ref,
-                             'type': _ref_data[1],
-                             'name': '/'.join(_ref_data[2:])})
-
-    git_revs = []
-    for push_ref in rev_data:
-        _type = push_ref['type']
-        if _type == 'heads':
-            if push_ref['old_rev'] == EmptyChangeset().raw_id:
-                # update the symbolic ref if we push new repo
-                if scm_repo.is_empty():
-                    scm_repo._repo.refs.set_symbolic_ref(
-                        b'HEAD',
-                        b'refs/heads/%s' % safe_bytes(push_ref['name']))
-
-                # build exclude list without the ref
-                cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
-                stdout = scm_repo.run_git_command(cmd)
-                ref = push_ref['ref']
-                heads = [head for head in stdout.splitlines() if head != ref]
-                # now list the git revs while excluding from the list
-                cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H']
-                cmd.append('--not')
-                cmd.extend(heads) # empty list is ok
-                stdout = scm_repo.run_git_command(cmd)
-                git_revs += stdout.splitlines()
-
-            elif push_ref['new_rev'] == EmptyChangeset().raw_id:
-                # delete branch case
-                git_revs += ['delete_branch=>%s' % push_ref['name']]
-            else:
-                cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
-                       '--reverse', '--pretty=format:%H']
-                stdout = scm_repo.run_git_command(cmd)
-                git_revs += stdout.splitlines()
-
-        elif _type == 'tags':
-            git_revs += ['tag=>%s' % push_ref['name']]
-
-    process_pushed_raw_ids(git_revs)
-
-    return 0
-
-
-# Almost exactly like Mercurial contrib/hg-ssh:
-def rejectpush(ui, **kwargs):
-    """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos"""
-    ex = get_hook_environment()
-    ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository))
-    return 1
+        callback(deleted_by=deleted_by, **user_dict)
--- a/kallithea/lib/indexers/daemon.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/indexers/daemon.py	Thu May 27 21:27:37 2021 +0200
@@ -28,30 +28,23 @@
 
 import logging
 import os
-import sys
 import traceback
-from os.path import dirname
 from shutil import rmtree
 from time import mktime
 
+from tg import config
 from whoosh.index import create_in, exists_in, open_dir
 from whoosh.qparser import QueryParser
 
-from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
+from kallithea.lib import celerylib
+from kallithea.lib.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
 from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA
 from kallithea.lib.utils2 import safe_str
 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, NodeDoesNotExistError, RepositoryError
-from kallithea.model.db import Repository
+from kallithea.model import db
 from kallithea.model.scm import ScmModel
 
 
-# Add location of top level folder to sys.path
-project_path = dirname(dirname(dirname(dirname(os.path.realpath(__file__)))))
-sys.path.append(project_path)
-
-
-
-
 log = logging.getLogger('whoosh_indexer')
 
 
@@ -109,7 +102,7 @@
             self.initial = False
 
     def _get_index_revision(self, repo):
-        db_repo = Repository.get_by_repo_name(repo.name)
+        db_repo = db.Repository.get_by_repo_name(repo.name)
         landing_rev = 'tip'
         if db_repo:
             _rev_type, _rev = db_repo.landing_rev
@@ -195,7 +188,6 @@
 
         writer.add_document(
             fileid=path,
-            owner=repo.contact,
             repository_rawname=repo_name,
             repository=repo_name,
             path=path,
@@ -234,7 +226,6 @@
             log.debug('    >> %s %s/%s', cs, indexed, total)
             writer.add_document(
                 raw_id=cs.raw_id,
-                owner=repo.contact,
                 date=cs._timestamp,
                 repository_rawname=repo_name,
                 repository=repo_name,
@@ -455,3 +446,12 @@
             self.build_indexes()
         else:
             self.update_indexes()
+
+
+@celerylib.task
+@celerylib.locked_task
+def whoosh_index(repo_location, full_index):
+    index_location = config['index_dir']
+    WhooshIndexingDaemon(index_location=index_location,
+                         repo_location=repo_location) \
+                         .run(full_index=full_index)
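A usage sketch (not part of the changeset) for the new task wrapper; the repository location is hypothetical, and index_dir is read from the application config as shown above. Depending on how celerylib.task is configured, the call may run synchronously or be dispatched to a worker:

    whoosh_index(repo_location='/srv/kallithea/repos', full_index=True)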
--- a/kallithea/lib/inifile.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/inifile.py	Thu May 27 21:27:37 2021 +0200
@@ -32,7 +32,7 @@
 
 template_file = os.path.join(
     os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
-    'kallithea/lib/paster_commands/template.ini.mako')
+    'kallithea/templates/ini/template.ini.mako')
 
 default_variables = {
     'database_engine': 'sqlite',
@@ -119,9 +119,6 @@
     #variable7 = 7.1
     #variable8 = 8.0
     <BLANKLINE>
-    variable8 = None
-    variable9 = None
-    <BLANKLINE>
     [fourth-section]
     fourth = "four"
     fourth_extra = 4
@@ -180,7 +177,7 @@
                 new_value = section_settings[key]
                 if new_value == line_value:
                     line = line.lstrip('#')
-                else:
+                elif new_value is not None:
                     line += '\n%s = %s' % (key, new_value)
                 section_settings.pop(key)
                 return line
@@ -189,8 +186,12 @@
 
             # 3rd pass:
             # settings that haven't been consumed yet at is appended to section
-            if section_settings:
-                lines += '\n' + ''.join('%s = %s\n' % (key, value) for key, value in sorted(section_settings.items()))
+            append_lines = ''.join(
+                '%s = %s\n' % (key, value)
+                for key, value in sorted(section_settings.items())
+                if value is not None)
+            if append_lines:
+                lines += '\n' + append_lines
 
         return sectionname + '\n' + re.sub('[ \t]+\n', '\n', lines)
 
--- a/kallithea/lib/locale.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-import logging
-import os
-import sys
-
-
-log = logging.getLogger(__name__)
-
-def current_locale_is_valid():
-    """Verify that things work when Dulwich passes unicode paths to the file system layer.
-
-    Note: UTF-8 is preferred, but for example ISO-8859-1 or mbcs should also
-    work under the right circumstances."""
-    try:
-        '\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
-    except UnicodeEncodeError:
-        log.error("Cannot encode Unicode paths to file system encoding %r", sys.getfilesystemencoding())
-        for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
-            if var in os.environ:
-                val = os.environ[var]
-                log.error("Note: Environment variable %s is %r - perhaps change it to some other value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'", var, val)
-                break
-        else:
-            log.error("Note: No locale setting found in environment variables - perhaps set LC_CTYPE to some value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'")
-        return False
-    return True
-
-def get_current_locale():
-    """Return the current locale based on environment variables.
-    There does not seem to be a good (and functional) way to get it via Python.
-    """
-    for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
-        val = os.environ.get(var)
-        if val:
-            log.debug('Determined current locale via environment variable %s (%s)', var, val)
-            return val
-    return None
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/locales.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+import logging
+import os
+import sys
+
+
+log = logging.getLogger(__name__)
+
+def current_locale_is_valid():
+    """Verify that things work when Dulwich passes unicode paths to the file system layer.
+
+    Note: UTF-8 is preferred, but for example ISO-8859-1 or mbcs should also
+    work under the right circumstances."""
+    try:
+        '\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
+    except UnicodeEncodeError:
+        log.error("Cannot encode Unicode paths to file system encoding %r", sys.getfilesystemencoding())
+        for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
+            if var in os.environ:
+                val = os.environ[var]
+                log.error("Note: Environment variable %s is %r - perhaps change it to some other value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'", var, val)
+                break
+        else:
+            log.error("Note: No locale setting found in environment variables - perhaps set LC_CTYPE to some value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'")
+        return False
+    return True
+
+def get_current_locale():
+    """Return the current locale based on environment variables.
+    There does not seem to be a good (and functional) way to get it via Python.
+    """
+    for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
+        val = os.environ.get(var)
+        if val:
+            log.debug('Determined current locale via environment variable %s (%s)', var, val)
+            return val
+    return None
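The module is only renamed (locale.py to locales.py), so the two helpers keep their behavior: probing whether the file system encoding can represent non-ASCII paths, and reporting which environment variable currently defines the locale. A rough usage sketch, assuming a Kallithea checkout where the renamed module is importable:

# Sketch: warn early when the environment locale cannot encode unicode paths.
from kallithea.lib.locales import current_locale_is_valid, get_current_locale

if not current_locale_is_valid():
    print('Problematic locale %r - consider LC_CTYPE=C.UTF-8' % get_current_locale())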
--- a/kallithea/lib/markup_renderer.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/markup_renderer.py	Thu May 27 21:27:37 2021 +0200
@@ -26,23 +26,22 @@
 """
 
 
+import hashlib
 import logging
 import re
 import traceback
 
 import bleach
 import markdown as markdown_mod
+from docutils.core import publish_parts
+from docutils.parsers.rst import directives
 
-from kallithea.lib.utils2 import MENTIONS_REGEX, safe_str
+from kallithea.lib import webutils
 
 
 log = logging.getLogger(__name__)
 
 
-url_re = re.compile(r'''\bhttps?://(?:[\da-zA-Z0-9@:.-]+)'''
-                    r'''(?:[/a-zA-Z0-9_=@#~&+%.,:;?!*()-]*[/a-zA-Z0-9_=@#~])?''')
-
-
 class MarkupRenderer(object):
     RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw']
 
@@ -74,13 +73,12 @@
 
         :param text:
         """
-        from hashlib import md5
 
         # Extract pre blocks.
         extractions = {}
 
         def pre_extraction_callback(matchobj):
-            digest = md5(matchobj.group(0)).hexdigest()
+            digest = hashlib.sha1(matchobj.group(0)).hexdigest()
             extractions[digest] = matchobj.group(0)
             return "{gfm-extraction-%s}" % digest
         pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
@@ -154,7 +152,6 @@
         >>> MarkupRenderer.plain('https://example.com/')
         '<br /><a href="https://example.com/">https://example.com/</a>'
         """
-        source = safe_str(source)
         if universal_newline:
             newline = '\n'
             source = newline.join(source.splitlines())
@@ -162,7 +159,7 @@
         def url_func(match_obj):
             url_full = match_obj.group(0)
             return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
-        source = url_re.sub(url_func, source)
+        source = webutils.url_re.sub(url_func, source)
         return '<br />' + source.replace("\n", '<br />')
 
     @classmethod
@@ -195,7 +192,6 @@
         </pre></div>
         </td></tr></table>
         """
-        source = safe_str(source)
         try:
             if flavored:
                 source = cls._flavored_markdown(source)
@@ -213,10 +209,7 @@
 
     @classmethod
     def rst(cls, source, safe=True):
-        source = safe_str(source)
         try:
-            from docutils.core import publish_parts
-            from docutils.parsers.rst import directives
             docutils_settings = dict([(alias, None) for alias in
                                 cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])
 
@@ -231,9 +224,6 @@
                                   settings_overrides=docutils_settings)
 
             return parts['html_title'] + parts["fragment"]
-        except ImportError:
-            log.warning('Install docutils to use this function')
-            return cls.plain(source)
         except Exception:
             log.error(traceback.format_exc())
             if safe:
@@ -248,5 +238,5 @@
         def wrapp(match_obj):
             uname = match_obj.groups()[0]
             return r'\ **@%(uname)s**\ ' % {'uname': uname}
-        mention_hl = MENTIONS_REGEX.sub(wrapp, source).strip()
+        mention_hl = webutils.MENTIONS_REGEX.sub(wrapp, source).strip()
         return cls.rst(mention_hl)
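The flavored-markdown pre-block extraction now keys its placeholders on a hashlib.sha1 digest instead of an inline md5 import. A self-contained sketch of that placeholder scheme; the sample text is made up, and the .encode('utf-8') is added here only so the snippet runs on a plain str:

import hashlib
import re

text = 'before <pre>raw &amp; verbatim</pre> after'
extractions = {}

def pre_extraction_callback(matchobj):
    # The digest is only a placeholder key; any collision-resistant hash would do.
    digest = hashlib.sha1(matchobj.group(0).encode('utf-8')).hexdigest()
    extractions[digest] = matchobj.group(0)
    return '{gfm-extraction-%s}' % digest

pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
text = pattern.sub(pre_extraction_callback, text)
# text now reads 'before {gfm-extraction-<sha1>} after'; the original block is kept in extractions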
--- a/kallithea/lib/middleware/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
--- a/kallithea/lib/middleware/appenlight.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.appenlight
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-middleware to handle appenlight publishing of errors
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: October 18, 2012
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-
-try:
-    from appenlight_client import make_appenlight_middleware
-except ImportError:
-    AppEnlight = None
-else:
-    AppEnlight = make_appenlight_middleware
--- a/kallithea/lib/middleware/https_fixup.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.https_fixup
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-middleware to handle https correctly
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: May 23, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-
-from kallithea.lib.utils2 import str2bool
-
-
-class HttpsFixup(object):
-
-    def __init__(self, app, config):
-        self.application = app
-        self.config = config
-
-    def __call__(self, environ, start_response):
-        self.__fixup(environ)
-        debug = str2bool(self.config.get('debug'))
-        is_ssl = environ['wsgi.url_scheme'] == 'https'
-
-        def custom_start_response(status, headers, exc_info=None):
-            if is_ssl and str2bool(self.config.get('use_htsts')) and not debug:
-                headers.append(('Strict-Transport-Security',
-                                'max-age=8640000; includeSubDomains'))
-            return start_response(status, headers, exc_info)
-
-        return self.application(environ, custom_start_response)
-
-    def __fixup(self, environ):
-        """
-        Function to fixup the environ as needed. In order to use this
-        middleware you should set this header inside your
-        proxy ie. nginx, apache etc.
-        """
-        # DETECT PROTOCOL !
-        if 'HTTP_X_URL_SCHEME' in environ:
-            proto = environ.get('HTTP_X_URL_SCHEME')
-        elif 'HTTP_X_FORWARDED_SCHEME' in environ:
-            proto = environ.get('HTTP_X_FORWARDED_SCHEME')
-        elif 'HTTP_X_FORWARDED_PROTO' in environ:
-            proto = environ.get('HTTP_X_FORWARDED_PROTO')
-        else:
-            proto = 'http'
-        org_proto = proto
-
-        # if we have force, just override
-        if str2bool(self.config.get('force_https')):
-            proto = 'https'
-
-        environ['wsgi.url_scheme'] = proto
-        environ['wsgi._org_proto'] = org_proto
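The removed HttpsFixup middleware decided the effective URL scheme from proxy-supplied headers, falling back to plain http. The same detection order can be exercised in isolation with a made-up environ dict:

# Standalone sketch of the scheme detection the removed middleware performed.
environ = {'HTTP_X_FORWARDED_PROTO': 'https', 'wsgi.url_scheme': 'http'}

for header in ('HTTP_X_URL_SCHEME', 'HTTP_X_FORWARDED_SCHEME', 'HTTP_X_FORWARDED_PROTO'):
    if header in environ:
        proto = environ[header]
        break
else:
    proto = 'http'  # no proxy header present

environ['wsgi.url_scheme'] = proto
print(environ['wsgi.url_scheme'])  # 'https'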
--- a/kallithea/lib/middleware/permanent_repo_url.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.permanent_repo_url
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-middleware to handle permanent repo URLs, replacing PATH_INFO '/_123/yada' with
-'/name/of/repo/yada' after looking 123 up in the database.
-"""
-
-
-from kallithea.lib.utils import fix_repo_id_name
-from kallithea.lib.utils2 import safe_bytes, safe_str
-
-
-class PermanentRepoUrl(object):
-
-    def __init__(self, app, config):
-        self.application = app
-        self.config = config
-
-    def __call__(self, environ, start_response):
-        # Extract path_info as get_path_info does, but do it explicitly because
-        # we also have to do the reverse operation when patching it back in
-        path_info = safe_str(environ['PATH_INFO'].encode('latin1'))
-        if path_info.startswith('/'): # it must
-            path_info = '/' + fix_repo_id_name(path_info[1:])
-            environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1')
-
-        return self.application(environ, start_response)
--- a/kallithea/lib/middleware/pygrack.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,228 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.pygrack
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Python implementation of git-http-backend's Smart HTTP protocol
-
-Based on original code from git_http_backend.py project.
-
-Copyright (c) 2010 Daniel Dotsenko <dotsa@hotmail.com>
-Copyright (c) 2012 Marcin Kuzminski <marcin@python-works.com>
-
-This file was forked by the Kallithea project in July 2014.
-"""
-
-import logging
-import os
-import socket
-import traceback
-
-from webob import Request, Response, exc
-
-import kallithea
-from kallithea.lib.utils2 import ascii_bytes
-from kallithea.lib.vcs import subprocessio
-
-
-log = logging.getLogger(__name__)
-
-
-class FileWrapper(object):
-
-    def __init__(self, fd, content_length):
-        self.fd = fd
-        self.content_length = content_length
-        self.remain = content_length
-
-    def read(self, size):
-        if size <= self.remain:
-            try:
-                data = self.fd.read(size)
-            except socket.error:
-                raise IOError(self)
-            self.remain -= size
-        elif self.remain:
-            data = self.fd.read(self.remain)
-            self.remain = 0
-        else:
-            data = None
-        return data
-
-    def __repr__(self):
-        return '<FileWrapper %s len: %s, read: %s>' % (
-            self.fd, self.content_length, self.content_length - self.remain
-        )
-
-
-class GitRepository(object):
-    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
-    commands = ['git-upload-pack', 'git-receive-pack']
-
-    def __init__(self, repo_name, content_path):
-        files = set([f.lower() for f in os.listdir(content_path)])
-        if not (self.git_folder_signature.intersection(files)
-                == self.git_folder_signature):
-            raise OSError('%s missing git signature' % content_path)
-        self.content_path = content_path
-        self.valid_accepts = ['application/x-%s-result' %
-                              c for c in self.commands]
-        self.repo_name = repo_name
-
-    def _get_fixedpath(self, path):
-        """
-        Small fix for repo_path
-
-        :param path:
-        """
-        assert path.startswith('/' + self.repo_name + '/')
-        return path[len(self.repo_name) + 2:].strip('/')
-
-    def inforefs(self, req, environ):
-        """
-        WSGI Response producer for HTTP GET Git Smart
-        HTTP /info/refs request.
-        """
-
-        git_command = req.GET.get('service')
-        if git_command not in self.commands:
-            log.debug('command %s not allowed', git_command)
-            return exc.HTTPMethodNotAllowed()
-
-        # From Documentation/technical/http-protocol.txt shipped with Git:
-        #
-        # Clients MUST verify the first pkt-line is `# service=$servicename`.
-        # Servers MUST set $servicename to be the request parameter value.
-        # Servers SHOULD include an LF at the end of this line.
-        # Clients MUST ignore an LF at the end of the line.
-        #
-        #  smart_reply     =  PKT-LINE("# service=$servicename" LF)
-        #                     ref_list
-        #                     "0000"
-        server_advert = '# service=%s\n' % git_command
-        packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
-        _git_path = kallithea.CONFIG.get('git_path', 'git')
-        cmd = [_git_path, git_command[4:],
-               '--stateless-rpc', '--advertise-refs', self.content_path]
-        log.debug('handling cmd %s', cmd)
-        try:
-            out = subprocessio.SubprocessIOChunker(cmd,
-                starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
-            )
-        except EnvironmentError as e:
-            log.error(traceback.format_exc())
-            raise exc.HTTPExpectationFailed()
-        resp = Response()
-        resp.content_type = 'application/x-%s-advertisement' % git_command
-        resp.charset = None
-        resp.app_iter = out
-        return resp
-
-    def backend(self, req, environ):
-        """
-        WSGI Response producer for HTTP POST Git Smart HTTP requests.
-        Reads commands and data from HTTP POST's body.
-        returns an iterator obj with contents of git command's
-        response to stdout
-        """
-        _git_path = kallithea.CONFIG.get('git_path', 'git')
-        git_command = self._get_fixedpath(req.path_info)
-        if git_command not in self.commands:
-            log.debug('command %s not allowed', git_command)
-            return exc.HTTPMethodNotAllowed()
-
-        if 'CONTENT_LENGTH' in environ:
-            inputstream = FileWrapper(environ['wsgi.input'],
-                                      req.content_length)
-        else:
-            inputstream = environ['wsgi.input']
-
-        gitenv = dict(os.environ)
-        # forget all configs
-        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
-        cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path]
-        log.debug('handling cmd %s', cmd)
-        try:
-            out = subprocessio.SubprocessIOChunker(
-                cmd,
-                inputstream=inputstream,
-                env=gitenv,
-                cwd=self.content_path,
-            )
-        except EnvironmentError as e:
-            log.error(traceback.format_exc())
-            raise exc.HTTPExpectationFailed()
-
-        if git_command in ['git-receive-pack']:
-            # updating refs manually after each push.
-            # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
-            from kallithea.lib.vcs import get_repo
-            from dulwich.server import update_server_info
-            repo = get_repo(self.content_path)
-            if repo:
-                update_server_info(repo._repo)
-
-        resp = Response()
-        resp.content_type = 'application/x-%s-result' % git_command
-        resp.charset = None
-        resp.app_iter = out
-        return resp
-
-    def __call__(self, environ, start_response):
-        req = Request(environ)
-        _path = self._get_fixedpath(req.path_info)
-        if _path.startswith('info/refs'):
-            app = self.inforefs
-        elif req.accept.acceptable_offers(self.valid_accepts):
-            app = self.backend
-        try:
-            resp = app(req, environ)
-        except exc.HTTPException as e:
-            resp = e
-            log.error(traceback.format_exc())
-        except Exception as e:
-            log.error(traceback.format_exc())
-            resp = exc.HTTPInternalServerError()
-        return resp(environ, start_response)
-
-
-class GitDirectory(object):
-
-    def __init__(self, repo_root, repo_name):
-        repo_location = os.path.join(repo_root, repo_name)
-        if not os.path.isdir(repo_location):
-            raise OSError(repo_location)
-
-        self.content_path = repo_location
-        self.repo_name = repo_name
-        self.repo_location = repo_location
-
-    def __call__(self, environ, start_response):
-        content_path = self.content_path
-        try:
-            app = GitRepository(self.repo_name, content_path)
-        except (AssertionError, OSError):
-            content_path = os.path.join(content_path, '.git')
-            if os.path.isdir(content_path):
-                app = GitRepository(self.repo_name, content_path)
-            else:
-                return exc.HTTPNotFound()(environ, start_response)
-        return app(environ, start_response)
-
-
-def make_wsgi_app(repo_name, repo_root):
-    from dulwich.web import LimitedInputFilter, GunzipFilter
-    app = GitDirectory(repo_root, repo_name)
-    return GunzipFilter(LimitedInputFilter(app))
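The /info/refs handler above builds the Git smart-HTTP advertisement by prefixing the '# service=...' line with its pkt-line length before streaming the refs from the git subprocess. The length arithmetic is easy to check on its own:

# Pkt-line prefix for the service advertisement, as computed in inforefs().
git_command = 'git-upload-pack'
server_advert = '# service=%s\n' % git_command
packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
assert packet_len == '001e'  # 26 payload bytes plus the 4-byte length field itself
first_chunk = packet_len + server_advert + '0000'  # '0000' is the flush packet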
--- a/kallithea/lib/middleware/sentry.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.sentry
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-middleware to handle sentry/raven publishing of errors
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: September 18, 2012
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-
-try:
-    from raven.base import Client
-    from raven.contrib.pylons import list_from_setting
-    from raven.middleware import Sentry as Middleware
-except ImportError:
-    Sentry = None
-else:
-    class Sentry(Middleware):
-        def __init__(self, app, config, client_cls=Client):
-            client = client_cls(
-                dsn=config.get('sentry.dsn'),
-                servers=list_from_setting(config, 'sentry.servers'),
-                name=config.get('sentry.name'),
-                key=config.get('sentry.key'),
-                public_key=config.get('sentry.public_key'),
-                secret_key=config.get('sentry.secret_key'),
-                project=config.get('sentry.project'),
-                site=config.get('sentry.site'),
-                include_paths=list_from_setting(config, 'sentry.include_paths'),
-                exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
-            )
-            super(Sentry, self).__init__(app, client)
--- a/kallithea/lib/middleware/simplegit.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,98 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.simplegit
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-SimpleGit middleware for handling Git protocol requests (push/clone etc.)
-It's implemented with basic auth function
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 28, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-
-"""
-
-
-import logging
-import re
-
-from kallithea.lib.base import BaseVCSController, get_path_info
-from kallithea.lib.hooks import log_pull_action
-from kallithea.lib.middleware.pygrack import make_wsgi_app
-from kallithea.lib.utils import make_ui
-from kallithea.model.db import Repository
-
-
-log = logging.getLogger(__name__)
-
-
-GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)$')
-
-
-cmd_mapping = {
-    'git-receive-pack': 'push',
-    'git-upload-pack': 'pull',
-}
-
-
-class SimpleGit(BaseVCSController):
-
-    scm_alias = 'git'
-
-    @classmethod
-    def parse_request(cls, environ):
-        path_info = get_path_info(environ)
-        m = GIT_PROTO_PAT.match(path_info)
-        if m is None:
-            return None
-
-        class parsed_request(object):
-            # See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol
-            repo_name = m.group(1).rstrip('/')
-            cmd = m.group(2)
-
-            query_string = environ['QUERY_STRING']
-            if cmd == 'info/refs' and query_string.startswith('service='):
-                service = query_string.split('=', 1)[1]
-                action = cmd_mapping.get(service)
-            else:
-                service = None
-                action = cmd_mapping.get(cmd)
-
-        return parsed_request
-
-    def _make_app(self, parsed_request):
-        """
-        Return a pygrack wsgi application.
-        """
-        pygrack_app = make_wsgi_app(parsed_request.repo_name, self.basepath)
-
-        def wrapper_app(environ, start_response):
-            if (parsed_request.cmd == 'info/refs' and
-                parsed_request.service == 'git-upload-pack'
-            ):
-                baseui = make_ui()
-                repo = Repository.get_by_repo_name(parsed_request.repo_name)
-                scm_repo = repo.scm_instance
-                # Run hooks, like Mercurial outgoing.pull_logger does
-                log_pull_action(ui=baseui, repo=scm_repo._repo)
-            # Note: push hooks are handled by post-receive hook
-
-            return pygrack_app(environ, start_response)
-
-        return wrapper_app
--- a/kallithea/lib/middleware/simplehg.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,149 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.simplehg
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.).
-It's implemented with basic auth function
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 28, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-
-"""
-
-
-import logging
-import os
-import urllib.parse
-
-import mercurial.hgweb
-
-from kallithea.lib.base import BaseVCSController, get_path_info
-from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_bytes
-
-
-log = logging.getLogger(__name__)
-
-
-def get_header_hgarg(environ):
-    """Decode the special Mercurial encoding of big requests over multiple headers.
-    >>> get_header_hgarg({})
-    ''
-    >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'})
-    'ab+c %20'
-    """
-    chunks = []
-    i = 1
-    while True:
-        v = environ.get('HTTP_X_HGARG_%d' % i)
-        if v is None:
-            break
-        chunks.append(v)
-        i += 1
-    return ''.join(chunks)
-
-
-cmd_mapping = {
-    # 'batch' is not in this list - it is handled explicitly
-    'between': 'pull',
-    'branches': 'pull',
-    'branchmap': 'pull',
-    'capabilities': 'pull',
-    'changegroup': 'pull',
-    'changegroupsubset': 'pull',
-    'changesetdata': 'pull',
-    'clonebundles': 'pull',
-    'debugwireargs': 'pull',
-    'filedata': 'pull',
-    'getbundle': 'pull',
-    'getlfile': 'pull',
-    'heads': 'pull',
-    'hello': 'pull',
-    'known': 'pull',
-    'lheads': 'pull',
-    'listkeys': 'pull',
-    'lookup': 'pull',
-    'manifestdata': 'pull',
-    'narrow_widen': 'pull',
-    'protocaps': 'pull',
-    'statlfile': 'pull',
-    'stream_out': 'pull',
-    'pushkey': 'push',
-    'putlfile': 'push',
-    'unbundle': 'push',
-    }
-
-
-class SimpleHg(BaseVCSController):
-
-    scm_alias = 'hg'
-
-    @classmethod
-    def parse_request(cls, environ):
-        http_accept = environ.get('HTTP_ACCEPT', '')
-        if not http_accept.startswith('application/mercurial'):
-            return None
-        path_info = get_path_info(environ)
-        if not path_info.startswith('/'): # it must!
-            return None
-
-        class parsed_request(object):
-            repo_name = path_info[1:].rstrip('/')
-
-            query_string = environ['QUERY_STRING']
-
-            action = None
-            for qry in query_string.split('&'):
-                parts = qry.split('=', 1)
-                if len(parts) == 2 and parts[0] == 'cmd':
-                    cmd = parts[1]
-                    if cmd == 'batch':
-                        hgarg = get_header_hgarg(environ)
-                        if not hgarg.startswith('cmds='):
-                            action = 'push' # paranoid and safe
-                            break
-                        action = 'pull'
-                        for cmd_arg in hgarg[5:].split(';'):
-                            cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
-                            op = cmd_mapping.get(cmd, 'push')
-                            if op != 'pull':
-                                assert op == 'push'
-                                action = 'push'
-                                break
-                    else:
-                        action = cmd_mapping.get(cmd, 'push')
-                    break # only process one cmd
-
-        return parsed_request
-
-    def _make_app(self, parsed_request):
-        """
-        Make an hgweb wsgi application.
-        """
-        repo_name = parsed_request.repo_name
-        repo_path = os.path.join(self.basepath, repo_name)
-        baseui = make_ui(repo_path=repo_path)
-        hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
-
-        def wrapper_app(environ, start_response):
-            environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb
-            return hgweb_app(environ, start_response)
-
-        return wrapper_app
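Whether a Mercurial wire command counts as a pull or a push is decided by cmd_mapping, with anything unknown deliberately treated as a push (the paranoid, more restricted side). A small standalone check of that classification, using an abridged copy of the table:

# Abridged copy of cmd_mapping; unknown commands default to 'push'.
cmd_mapping = {'heads': 'pull', 'getbundle': 'pull', 'unbundle': 'push'}

assert cmd_mapping.get('getbundle', 'push') == 'pull'
assert cmd_mapping.get('unbundle', 'push') == 'push'
assert cmd_mapping.get('some-future-command', 'push') == 'push'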
--- a/kallithea/lib/middleware/wrapper.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.middleware.wrapper
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Wrap app to measure request and response time ... all the way to the response
-WSGI iterator has been closed.
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: May 23, 2013
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import logging
-import time
-
-from kallithea.lib.base import _get_ip_addr, get_path_info
-
-
-log = logging.getLogger(__name__)
-
-
-class Meter:
-
-    def __init__(self, start_response):
-        self._start_response = start_response
-        self._start = time.time()
-        self.status = None
-        self._size = 0
-
-    def duration(self):
-        return time.time() - self._start
-
-    def start_response(self, status, response_headers, exc_info=None):
-        self.status = status
-        write = self._start_response(status, response_headers, exc_info)
-        def metered_write(s):
-            self.measure(s)
-            write(s)
-        return metered_write
-
-    def measure(self, chunk):
-        self._size += len(chunk)
-
-    def size(self):
-        return self._size
-
-
-class ResultIter:
-
-    def __init__(self, result, meter, description):
-        self._result_close = getattr(result, 'close', None) or (lambda: None)
-        self._next = iter(result).__next__
-        self._meter = meter
-        self._description = description
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        chunk = self._next()
-        self._meter.measure(chunk)
-        return chunk
-
-    def close(self):
-        self._result_close()
-        log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size())
-
-
-class RequestWrapper(object):
-
-    def __init__(self, app, config):
-        self.application = app
-        self.config = config
-
-    def __call__(self, environ, start_response):
-        meter = Meter(start_response)
-        description = "Request from %s for %s" % (
-            _get_ip_addr(environ),
-            get_path_info(environ),
-        )
-        log.info("%s received", description)
-        try:
-            result = self.application(environ, meter.start_response)
-        finally:
-            log.info("%s responding %r after %.3fs", description, meter.status, meter.duration())
-        return ResultIter(result, meter, description)
--- a/kallithea/lib/page.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/page.py	Thu May 27 21:27:37 2021 +0200
@@ -19,9 +19,8 @@
 import paginate
 import paginate_sqlalchemy
 import sqlalchemy.orm
-from webhelpers2.html import literal
 
-from kallithea.config.routing import url
+from kallithea.lib import webutils
 
 
 log = logging.getLogger(__name__)
@@ -35,10 +34,10 @@
         if isinstance(collection, sqlalchemy.orm.query.Query):
             collection = paginate_sqlalchemy.SqlalchemyOrmWrapper(collection)
         paginate.Page.__init__(self, collection, page=page, items_per_page=items_per_page, item_count=item_count,
-                               url_maker=lambda page: url.current(page=page, **kwargs))
+                               url_maker=lambda page: webutils.url.current(page=page, **kwargs))
 
     def pager(self):
-        return literal(
+        return webutils.literal(
             paginate.Page.pager(self,
                 format='<ul class="pagination">$link_previous\n~4~$link_next</ul>',
                 link_attr={'class': 'pager_link'},
--- a/kallithea/lib/paster_commands/template.ini.mako	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,617 +0,0 @@
-## -*- coding: utf-8 -*-
-<%text>##</%text>#################################################################################
-<%text>##</%text>#################################################################################
-<%text>##</%text> Kallithea config file generated with kallithea-cli ${'%-27s' % version       }##
-<%text>##</%text>                                                                               ##
-<%text>##</%text> The %(here)s variable will generally be replaced with the parent directory of ##
-<%text>##</%text> this file. Other use of % must be escaped as %% .                             ##
-<%text>##</%text>#################################################################################
-<%text>##</%text>#################################################################################
-
-[DEFAULT]
-
-<%text>##</%text>##############################################################################
-<%text>##</%text> Email settings                                                             ##
-<%text>##</%text>                                                                            ##
-<%text>##</%text> Refer to the documentation ("Email settings") for more details.            ##
-<%text>##</%text>                                                                            ##
-<%text>##</%text> It is recommended to use a valid sender address that passes access         ##
-<%text>##</%text> validation and spam filtering in mail servers.                             ##
-<%text>##</%text>##############################################################################
-
-<%text>##</%text> 'From' header for application emails. You can optionally add a name.
-<%text>##</%text> Default:
-#app_email_from = Kallithea
-<%text>##</%text> Examples:
-#app_email_from = Kallithea <kallithea-noreply@example.com>
-#app_email_from = kallithea-noreply@example.com
-
-<%text>##</%text> Subject prefix for application emails.
-<%text>##</%text> A space between this prefix and the real subject is automatically added.
-<%text>##</%text> Default:
-#email_prefix =
-<%text>##</%text> Example:
-#email_prefix = [Kallithea]
-
-<%text>##</%text> Recipients for error emails and fallback recipients of application mails.
-<%text>##</%text> Multiple addresses can be specified, comma-separated.
-<%text>##</%text> Only addresses are allowed, do not add any name part.
-<%text>##</%text> Default:
-#email_to =
-<%text>##</%text> Examples:
-#email_to = admin@example.com
-#email_to = admin@example.com,another_admin@example.com
-email_to =
-
-<%text>##</%text> 'From' header for error emails. You can optionally add a name.
-<%text>##</%text> Default: (none)
-<%text>##</%text> Examples:
-#error_email_from = Kallithea Errors <kallithea-noreply@example.com>
-#error_email_from = kallithea_errors@example.com
-error_email_from =
-
-<%text>##</%text> SMTP server settings
-<%text>##</%text> If specifying credentials, make sure to use secure connections.
-<%text>##</%text> Default: Send unencrypted unauthenticated mails to the specified smtp_server.
-<%text>##</%text> For "SSL", use smtp_use_ssl = true and smtp_port = 465.
-<%text>##</%text> For "STARTTLS", use smtp_use_tls = true and smtp_port = 587.
-smtp_server =
-smtp_username =
-smtp_password =
-smtp_port =
-smtp_use_ssl = false
-smtp_use_tls = false
-
-%if http_server != 'uwsgi':
-<%text>##</%text> Entry point for 'gearbox serve'
-[server:main]
-host = ${host}
-port = ${port}
-
-%if http_server == 'gearbox':
-<%text>##</%text> Gearbox default web server ##
-use = egg:gearbox#wsgiref
-<%text>##</%text> nr of worker threads to spawn
-threadpool_workers = 1
-<%text>##</%text> max request before thread respawn
-threadpool_max_requests = 100
-<%text>##</%text> option to use threads of process
-use_threadpool = true
-
-%elif http_server == 'gevent':
-<%text>##</%text> Gearbox gevent web server ##
-use = egg:gearbox#gevent
-
-%elif http_server == 'waitress':
-<%text>##</%text> WAITRESS ##
-use = egg:waitress#main
-<%text>##</%text> number of worker threads
-threads = 1
-<%text>##</%text> MAX BODY SIZE 100GB
-max_request_body_size = 107374182400
-<%text>##</%text> use poll instead of select, fixes fd limits, may not work on old
-<%text>##</%text> windows systems.
-#asyncore_use_poll = True
-
-%elif http_server == 'gunicorn':
-<%text>##</%text> GUNICORN ##
-use = egg:gunicorn#main
-<%text>##</%text> number of process workers. You must set `instance_id = *` when this option
-<%text>##</%text> is set to more than one worker
-workers = 4
-<%text>##</%text> process name
-proc_name = kallithea
-<%text>##</%text> type of worker class, one of sync, eventlet, gevent, tornado
-<%text>##</%text> recommended for bigger setup is using of of other than sync one
-worker_class = sync
-max_requests = 1000
-<%text>##</%text> amount of time a worker can handle request before it gets killed and
-<%text>##</%text> restarted
-timeout = 3600
-
-%endif
-%else:
-<%text>##</%text> UWSGI ##
-[uwsgi]
-<%text>##</%text> Note: this section is parsed by the uWSGI .ini parser when run as:
-<%text>##</%text> uwsgi --venv /srv/kallithea/venv --ini-paste-logged my.ini
-<%text>##</%text> Note: in uWSGI 2.0.18 or older, pastescript needs to be installed to
-<%text>##</%text> get correct application logging. In later versions this is not necessary.
-<%text>##</%text> pip install pastescript
-
-<%text>##</%text> HTTP Basics:
-http-socket = ${host}:${port}
-buffer-size = 65535                    ; Mercurial will use huge GET headers for discovery
-
-<%text>##</%text> Scaling:
-master = true                          ; Use separate master and worker processes
-auto-procname = true                   ; Name worker processes accordingly
-lazy = true                            ; App *must* be loaded in workers - db connections can't be shared
-workers = 4                            ; On demand scaling up to this many worker processes
-cheaper = 1                            ; Initial and on demand scaling down to this many worker processes
-max-requests = 1000                    ; Graceful reload of worker processes to avoid leaks
-
-<%text>##</%text> Tweak defaults:
-strict = true                          ; Fail on unknown config directives
-enable-threads = true                  ; Enable Python threads (not threaded workers)
-vacuum = true                          ; Delete sockets during shutdown
-single-interpreter = true
-die-on-term = true                     ; Shutdown when receiving SIGTERM (default is respawn)
-need-app = true                        ; Exit early if no app can be loaded.
-reload-on-exception = true             ; Don't assume that the application worker can process more requests after a severe error
-
-%endif
-<%text>##</%text> middleware for hosting the WSGI application under a URL prefix
-#[filter:proxy-prefix]
-#use = egg:PasteDeploy#prefix
-#prefix = /<your-prefix>
-
-[app:main]
-use = egg:kallithea
-<%text>##</%text> enable proxy prefix middleware
-#filter-with = proxy-prefix
-
-full_stack = true
-static_files = true
-
-<%text>##</%text> Internationalization (see setup documentation for details)
-<%text>##</%text> By default, the languages requested by the browser are used if available, with English as default.
-<%text>##</%text> Set i18n.enabled=false to disable automatic language choice.
-#i18n.enabled = true
-<%text>##</%text> To Force a language, set i18n.enabled=false and specify the language in i18n.lang.
-<%text>##</%text> Valid values are the names of subdirectories in kallithea/i18n with a LC_MESSAGES/kallithea.mo
-#i18n.lang = en
-
-cache_dir = %(here)s/data
-index_dir = %(here)s/data/index
-
-<%text>##</%text> uncomment and set this path to use archive download cache
-archive_cache_dir = %(here)s/tarballcache
-
-<%text>##</%text> change this to unique ID for security
-app_instance_uuid = ${uuid()}
-
-<%text>##</%text> cut off limit for large diffs (size in bytes)
-cut_off_limit = 256000
-
-<%text>##</%text> force https in Kallithea, fixes https redirects, assumes it's always https
-force_https = false
-
-<%text>##</%text> use Strict-Transport-Security headers
-use_htsts = false
-
-<%text>##</%text> number of commits stats will parse on each iteration
-commit_parse_limit = 25
-
-<%text>##</%text> Path to Python executable to be used for git hooks.
-<%text>##</%text> This value will be written inside the git hook scripts as the text
-<%text>##</%text> after '#!' (shebang). When empty or not defined, the value of
-<%text>##</%text> 'sys.executable' at the time of installation of the git hooks is
-<%text>##</%text> used, which is correct in many cases but for example not when using uwsgi.
-<%text>##</%text> If you change this setting, you should reinstall the Git hooks via
-<%text>##</%text> Admin > Settings > Remap and Rescan.
-#git_hook_interpreter = /srv/kallithea/venv/bin/python3
-%if git_hook_interpreter:
-git_hook_interpreter = ${git_hook_interpreter}
-%endif
-
-<%text>##</%text> path to git executable
-git_path = git
-
-<%text>##</%text> git rev filter option, --all is the default filter, if you need to
-<%text>##</%text> hide all refs in changelog switch this to --branches --tags
-#git_rev_filter = --branches --tags
-
-<%text>##</%text> RSS feed options
-rss_cut_off_limit = 256000
-rss_items_per_page = 10
-rss_include_diff = false
-
-<%text>##</%text> options for showing and identifying changesets
-show_sha_length = 12
-show_revision_number = false
-
-<%text>##</%text> Canonical URL to use when creating full URLs in UI and texts.
-<%text>##</%text> Useful when the site is available under different names or protocols.
-<%text>##</%text> Defaults to what is provided in the WSGI environment.
-#canonical_url = https://kallithea.example.com/repos
-
-<%text>##</%text> gist URL alias, used to create nicer urls for gist. This should be an
-<%text>##</%text> url that does rewrites to _admin/gists/<gistid>.
-<%text>##</%text> example: http://gist.example.com/{gistid}. Empty means use the internal
-<%text>##</%text> Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid>
-gist_alias_url =
-
-<%text>##</%text> default encoding used to convert from and to unicode
-<%text>##</%text> can be also a comma separated list of encoding in case of mixed encodings
-default_encoding = utf-8
-
-<%text>##</%text> Set Mercurial encoding, similar to setting HGENCODING before launching Kallithea
-hgencoding = utf-8
-
-<%text>##</%text> issue tracker for Kallithea (leave blank to disable, absent for default)
-#bugtracker = https://bitbucket.org/conservancy/kallithea/issues
-
-<%text>##</%text> issue tracking mapping for commit messages, comments, PR descriptions, ...
-<%text>##</%text> Refer to the documentation ("Integration with issue trackers") for more details.
-
-<%text>##</%text> regular expression to match issue references
-<%text>##</%text> This pattern may/should contain parenthesized groups, that can
-<%text>##</%text> be referred to in issue_server_link or issue_sub using Python backreferences
-<%text>##</%text> (e.g. \1, \2, ...). You can also create named groups with '(?P<groupname>)'.
-<%text>##</%text> To require mandatory whitespace before the issue pattern, use:
-<%text>##</%text> (?:^|(?<=\s)) before the actual pattern, and for mandatory whitespace
-<%text>##</%text> behind the issue pattern, use (?:$|(?=\s)) after the actual pattern.
-
-issue_pat = #(\d+)
-
-<%text>##</%text> server url to the issue
-<%text>##</%text> This pattern may/should contain backreferences to parenthesized groups in issue_pat.
-<%text>##</%text> A backreference can be \1, \2, ... or \g<groupname> if you specified a named group
-<%text>##</%text> called 'groupname' in issue_pat.
-<%text>##</%text> The special token {repo} is replaced with the full repository name
-<%text>##</%text> including repository groups, while {repo_name} is replaced with just
-<%text>##</%text> the name of the repository.
-
-issue_server_link = https://issues.example.com/{repo}/issue/\1
-
-<%text>##</%text> substitution pattern to use as the link text
-<%text>##</%text> If issue_sub is empty, the text matched by issue_pat is retained verbatim
-<%text>##</%text> for the link text. Otherwise, the link text is that of issue_sub, with any
-<%text>##</%text> backreferences to groups in issue_pat replaced.
-
-issue_sub =
-
-<%text>##</%text> issue_pat, issue_server_link and issue_sub can have suffixes to specify
-<%text>##</%text> multiple patterns, to other issues server, wiki or others
-<%text>##</%text> below an example how to create a wiki pattern
-<%text>##</%text> wiki-some-id -> https://wiki.example.com/some-id
-
-#issue_pat_wiki = wiki-(\S+)
-#issue_server_link_wiki = https://wiki.example.com/\1
-#issue_sub_wiki = WIKI-\1
-
-<%text>##</%text> alternative return HTTP header for failed authentication. Default HTTP
-<%text>##</%text> response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with
-<%text>##</%text> handling that. Set this variable to 403 to return HTTPForbidden
-auth_ret_code =
-
-<%text>##</%text> allows to change the repository location in settings page
-allow_repo_location_change = True
-
-<%text>##</%text> allows to setup custom hooks in settings page
-allow_custom_hooks_settings = True
-
-<%text>##</%text> extra extensions for indexing, space separated and without the leading '.'.
-#index.extensions =
-#    gemfile
-#    lock
-
-<%text>##</%text> extra filenames for indexing, space separated
-#index.filenames =
-#    .dockerignore
-#    .editorconfig
-#    INSTALL
-#    CHANGELOG
-
-<%text>##</%text>##################################
-<%text>##</%text>            SSH CONFIG          ##
-<%text>##</%text>##################################
-
-<%text>##</%text> SSH is disabled by default, until an Administrator decides to enable it.
-ssh_enabled = false
-
-<%text>##</%text> File where users' SSH keys will be stored *if* ssh_enabled is true.
-#ssh_authorized_keys = /home/kallithea/.ssh/authorized_keys
-%if user_home_path:
-ssh_authorized_keys = ${user_home_path}/.ssh/authorized_keys
-%endif
-
-<%text>##</%text> Path to be used in ssh_authorized_keys file to invoke kallithea-cli with ssh-serve.
-#kallithea_cli_path = /srv/kallithea/venv/bin/kallithea-cli
-%if kallithea_cli_path:
-kallithea_cli_path = ${kallithea_cli_path}
-%endif
-
-<%text>##</%text> Locale to be used in the ssh-serve command.
-<%text>##</%text> This is needed because an SSH client may try to use its own locale
-<%text>##</%text> settings, which may not be available on the server.
-<%text>##</%text> See `locale -a` for valid values on this system.
-#ssh_locale = C.UTF-8
-%if ssh_locale:
-ssh_locale = ${ssh_locale}
-%endif
-
-<%text>##</%text>##################################
-<%text>##</%text>         CELERY CONFIG          ##
-<%text>##</%text>##################################
-
-<%text>##</%text> Note: Celery doesn't support Windows.
-use_celery = false
-
-<%text>##</%text> Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.
-
-<%text>##</%text> Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':
-celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
-
-celery.result_backend = db+sqlite:///celery-results.db
-
-#celery.amqp.task.result.expires = 18000
-
-celery.worker_concurrency = 2
-celery.worker_max_tasks_per_child = 1
-
-<%text>##</%text> If true, tasks will never be sent to the queue, but executed locally instead.
-celery.task_always_eager = false
-
-<%text>##</%text>##################################
-<%text>##</%text>          BEAKER CACHE          ##
-<%text>##</%text>##################################
-
-beaker.cache.data_dir = %(here)s/data/cache/data
-beaker.cache.lock_dir = %(here)s/data/cache/lock
-
-beaker.cache.regions = long_term,long_term_file
-
-beaker.cache.long_term.type = memory
-beaker.cache.long_term.expire = 36000
-beaker.cache.long_term.key_length = 256
-
-beaker.cache.long_term_file.type = file
-beaker.cache.long_term_file.expire = 604800
-beaker.cache.long_term_file.key_length = 256
-
-<%text>##</%text>##################################
-<%text>##</%text>        BEAKER SESSION          ##
-<%text>##</%text>##################################
-
-<%text>##</%text> Name of session cookie. Should be unique for a given host and path, even when running
-<%text>##</%text> on different ports. Otherwise, cookie sessions will be shared and messed up.
-session.key = kallithea
-<%text>##</%text> Sessions should always only be accessible by the browser, not directly by JavaScript.
-session.httponly = true
-<%text>##</%text> Session lifetime. 2592000 seconds is 30 days.
-session.timeout = 2592000
-
-<%text>##</%text> Server secret used with HMAC to ensure integrity of cookies.
-session.secret = ${uuid()}
-<%text>##</%text> Further, encrypt the data with AES.
-#session.encrypt_key = <key_for_encryption>
-#session.validate_key = <validation_key>
-
-<%text>##</%text> Type of storage used for the session, current types are
-<%text>##</%text> dbm, file, memcached, database, and memory.
-
-<%text>##</%text> File system storage of session data. (default)
-#session.type = file
-
-<%text>##</%text> Cookie only, store all session data inside the cookie. Requires secure secrets.
-#session.type = cookie
-
-<%text>##</%text> Database storage of session data.
-#session.type = ext:database
-#session.sa.url = postgresql://postgres:qwe@localhost/kallithea
-#session.table_name = db_session
-
-<%text>##</%text>##################################
-<%text>##</%text>        ERROR HANDLING          ##
-<%text>##</%text>##################################
-
-<%text>##</%text> Show a nice error page for application HTTP errors and exceptions (default true)
-#errorpage.enabled = true
-
-<%text>##</%text> Enable Backlash client-side interactive debugger (default false)
-<%text>##</%text> WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*
-<%text>##</%text> This debug mode will allow all visitors to execute malicious code.
-#debug = false
-
-<%text>##</%text> Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)
-#trace_errors.enable = true
-<%text>##</%text> Errors will be reported by mail if trace_errors.error_email is set.
-
-<%text>##</%text> Propagate email settings to ErrorReporter of TurboGears2
-<%text>##</%text> You do not normally need to change these lines
-get trace_errors.smtp_server = smtp_server
-get trace_errors.smtp_port = smtp_port
-get trace_errors.from_address = error_email_from
-get trace_errors.error_email = email_to
-get trace_errors.smtp_username = smtp_username
-get trace_errors.smtp_password = smtp_password
-get trace_errors.smtp_use_tls = smtp_use_tls
-
-%if error_aggregation_service == 'sentry':
-<%text>##</%text>##############
-<%text>##</%text>  [sentry]  ##
-<%text>##</%text>##############
-
-<%text>##</%text> Sentry is an alternative open source error aggregator.
-<%text>##</%text> You must install the Python packages `sentry` and `raven` to enable it.
-
-sentry.dsn = YOUR_DSN
-sentry.servers =
-sentry.name =
-sentry.key =
-sentry.public_key =
-sentry.secret_key =
-sentry.project =
-sentry.site =
-sentry.include_paths =
-sentry.exclude_paths =
-
-%endif
-
-<%text>##</%text>################################
-<%text>##</%text>        LOGVIEW CONFIG        ##
-<%text>##</%text>################################
-
-logview.sqlalchemy = #faa
-logview.pylons.templating = #bfb
-logview.pylons.util = #eee
-
-<%text>##</%text>#######################
-<%text>##</%text>      DB CONFIG      ##
-<%text>##</%text>#######################
-
-%if database_engine == 'sqlite':
-<%text>##</%text> SQLITE [default]
-sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
-
-%elif database_engine == 'postgres':
-<%text>##</%text> POSTGRESQL
-sqlalchemy.url = postgresql://user:pass@localhost/kallithea
-
-%elif database_engine == 'mysql':
-<%text>##</%text> MySQL
-sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8
-<%text>##</%text> Note: the mysql:// prefix should also be used for MariaDB
-
-%endif
-<%text>##</%text> see sqlalchemy docs for other backends
-
-sqlalchemy.pool_recycle = 3600
-
-<%text>##</%text>##############################
-<%text>##</%text>   ALEMBIC CONFIGURATION    ##
-<%text>##</%text>##############################
-
-[alembic]
-script_location = kallithea:alembic
-
-<%text>##</%text>##############################
-<%text>##</%text>   LOGGING CONFIGURATION    ##
-<%text>##</%text>##############################
-
-[loggers]
-keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer, werkzeug, backlash
-
-[handlers]
-keys = console, console_color, console_color_sql, null
-
-[formatters]
-keys = generic, color_formatter, color_formatter_sql
-
-<%text>##</%text>###########
-<%text>##</%text> LOGGERS ##
-<%text>##</%text>###########
-
-[logger_root]
-level = NOTSET
-handlers = console
-<%text>##</%text> For coloring based on log level:
-#handlers = console_color
-
-[logger_routes]
-level = WARN
-handlers =
-qualname = routes.middleware
-<%text>##</%text> "level = DEBUG" logs the route matched and routing variables.
-
-[logger_beaker]
-level = WARN
-handlers =
-qualname = beaker.container
-
-[logger_templates]
-level = WARN
-handlers =
-qualname = pylons.templating
-
-[logger_kallithea]
-level = WARN
-handlers =
-qualname = kallithea
-
-[logger_tg]
-level = WARN
-handlers =
-qualname = tg
-
-[logger_gearbox]
-level = WARN
-handlers =
-qualname = gearbox
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-<%text>##</%text> For coloring based on log level and pretty printing of SQL:
-#level = INFO
-#handlers = console_color_sql
-#propagate = 0
-
-[logger_whoosh_indexer]
-level = WARN
-handlers =
-qualname = whoosh_indexer
-
-[logger_werkzeug]
-level = WARN
-handlers =
-qualname = werkzeug
-
-[logger_backlash]
-level = WARN
-handlers =
-qualname = backlash
-
-<%text>##</%text>############
-<%text>##</%text> HANDLERS ##
-<%text>##</%text>############
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-formatter = generic
-
-[handler_console_color]
-<%text>##</%text> ANSI color coding based on log level
-class = StreamHandler
-args = (sys.stderr,)
-formatter = color_formatter
-
-[handler_console_color_sql]
-<%text>##</%text> ANSI color coding and pretty printing of SQL statements
-class = StreamHandler
-args = (sys.stderr,)
-formatter = color_formatter_sql
-
-[handler_null]
-class = NullHandler
-args = ()
-
-<%text>##</%text>##############
-<%text>##</%text> FORMATTERS ##
-<%text>##</%text>##############
-
-[formatter_generic]
-format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
-
-[formatter_color_formatter]
-class = kallithea.lib.colored_formatter.ColorFormatter
-format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
-
-[formatter_color_formatter_sql]
-class = kallithea.lib.colored_formatter.ColorFormatterSql
-format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
-
-<%text>##</%text>###############
-<%text>##</%text> SSH LOGGING ##
-<%text>##</%text>###############
-
-<%text>##</%text> The default loggers use 'handler_console' that uses StreamHandler with
-<%text>##</%text> destination 'sys.stderr'. In the context of the SSH server process, these log
-<%text>##</%text> messages would be sent to the client, which is normally not what you want.
-<%text>##</%text> By default, when running ssh-serve, just use NullHandler and disable logging
-<%text>##</%text> completely. For other logging options, see:
-<%text>##</%text> https://docs.python.org/2/library/logging.handlers.html
-
-[ssh_serve:logger_root]
-level = CRITICAL
-handlers = null
-
-<%text>##</%text> Note: If logging is configured with other handlers, they might need similar
-<%text>##</%text> muting for ssh-serve too.
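The [loggers], [handlers] and [formatters] sections in the template above follow the standard library logging.config.fileConfig layout. As a minimal sketch (assuming the rendered template has been saved as my.ini; a PasteDeploy/gearbox based startup normally performs this step itself), such a configuration is loaded with:

    import logging.config

    # Read the [loggers]/[handlers]/[formatters] sections from the ini file;
    # keep any loggers created before this call instead of silencing them.
    logging.config.fileConfig('my.ini', disable_existing_loggers=False)
    logging.getLogger('kallithea').warning('logging configured from my.ini')
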
--- a/kallithea/lib/pygmentsutils.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/pygmentsutils.py	Thu May 27 21:27:37 2021 +0200
@@ -29,6 +29,8 @@
 
 from pygments import lexers
 
+import kallithea
+
 
 def get_extension_descriptions():
     """
@@ -69,10 +71,9 @@
 
 def get_custom_lexer(extension):
     """
-    returns a custom lexer if it's defined in rcextensions module, or None
+    returns a custom lexer if it's defined in the extensions module, or None
     if there's no custom lexer defined
     """
-    import kallithea
     lexer_name = getattr(kallithea.EXTENSIONS, 'EXTRA_LEXERS', {}).get(extension)
     if lexer_name is None:
         return None
--- a/kallithea/lib/rcmail/exceptions.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-
-
-class InvalidMessage(RuntimeError):
-    """
-    Raised if message is missing vital headers, such
-    as recipients or sender address.
-    """
-
-
-class BadHeaders(RuntimeError):
-    """
-    Raised if message contains newlines in headers.
-    """
--- a/kallithea/lib/rcmail/message.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,152 +0,0 @@
-from kallithea.lib.rcmail.exceptions import BadHeaders, InvalidMessage
-from kallithea.lib.rcmail.response import MailResponse
-
-
-class Message(object):
-    """
-    Encapsulates an email message.
-
-    :param subject: email subject header
-    :param recipients: list of email addresses
-    :param body: plain text message
-    :param html: HTML message
-    :param sender: email sender address
-    :param cc: CC list
-    :param bcc: BCC list
-    :param extra_headers: dict of extra email headers
-    :param attachments: list of Attachment instances
-    :param recipients_separator: alternative separator for any of
-        'From', 'To', 'Delivered-To', 'Cc', 'Bcc' fields
-    """
-
-    def __init__(self,
-                 subject=None,
-                 recipients=None,
-                 body=None,
-                 html=None,
-                 sender=None,
-                 cc=None,
-                 bcc=None,
-                 extra_headers=None,
-                 attachments=None,
-                 recipients_separator="; "):
-
-        self.subject = subject or ''
-        self.sender = sender
-        self.body = body
-        self.html = html
-
-        self.recipients = recipients or []
-        self.attachments = attachments or []
-        self.cc = cc or []
-        self.bcc = bcc or []
-        self.extra_headers = extra_headers or {}
-
-        self.recipients_separator = recipients_separator
-
-    @property
-    def send_to(self):
-        return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())
-
-    def to_message(self):
-        """
-        Returns raw email.Message instance.Validates message first.
-        """
-
-        self.validate()
-
-        return self.get_response().to_message()
-
-    def get_response(self):
-        """
-        Creates a Lamson MailResponse instance
-        """
-
-        response = MailResponse(Subject=self.subject,
-                                To=self.recipients,
-                                From=self.sender,
-                                Body=self.body,
-                                Html=self.html,
-                                separator=self.recipients_separator)
-
-        if self.cc:
-            response.base['Cc'] = self.cc
-
-        for attachment in self.attachments:
-
-            response.attach(attachment.filename,
-                            attachment.content_type,
-                            attachment.data,
-                            attachment.disposition)
-
-        response.update(self.extra_headers)
-
-        return response
-
-    def is_bad_headers(self):
-        """
-        Checks for bad headers i.e. newlines in subject, sender or recipients.
-        """
-
-        headers = [self.subject, self.sender]
-        headers += list(self.send_to)
-        headers += self.extra_headers.values()
-
-        for val in headers:
-            for c in '\r\n':
-                if c in val:
-                    return True
-        return False
-
-    def validate(self):
-        """
-        Checks if message is valid and raises appropriate exception.
-        """
-
-        if not self.recipients:
-            raise InvalidMessage("No recipients have been added")
-
-        if not self.body and not self.html:
-            raise InvalidMessage("No body has been set")
-
-        if not self.sender:
-            raise InvalidMessage("No sender address has been set")
-
-        if self.is_bad_headers():
-            raise BadHeaders
-
-    def add_recipient(self, recipient):
-        """
-        Adds another recipient to the message.
-
-        :param recipient: email address of recipient.
-        """
-
-        self.recipients.append(recipient)
-
-    def add_cc(self, recipient):
-        """
-        Adds an email address to the CC list.
-
-        :param recipient: email address of recipient.
-        """
-
-        self.cc.append(recipient)
-
-    def add_bcc(self, recipient):
-        """
-        Adds an email address to the BCC list.
-
-        :param recipient: email address of recipient.
-        """
-
-        self.bcc.append(recipient)
-
-    def attach(self, attachment):
-        """
-        Adds an attachment to the message.
-
-        :param attachment: an **Attachment** instance.
-        """
-
-        self.attachments.append(attachment)
--- a/kallithea/lib/rcmail/response.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,456 +0,0 @@
-# The code in this module is entirely lifted from the Lamson project
-# (http://lamsonproject.org/).  Its copyright is:
-
-# Copyright (c) 2008, Zed A. Shaw
-# All rights reserved.
-
-# It is provided under this license:
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-
-# * Neither the name of the Zed A. Shaw nor the names of its contributors may
-#   be used to endorse or promote products derived from this software without
-#   specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-
-import mimetypes
-import os
-import string
-from email import encoders
-from email.charset import Charset
-from email.mime.base import MIMEBase
-from email.utils import parseaddr
-
-
-ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc']
-DEFAULT_ENCODING = "utf-8"
-
-def VALUE_IS_EMAIL_ADDRESS(v):
-    return '@' in v
-
-
-def normalize_header(header):
-    return string.capwords(header.lower(), '-')
-
-
-class EncodingError(Exception):
-    """Thrown when there is an encoding error."""
-    pass
-
-
-class MailBase(object):
-    """MailBase is used as the basis of lamson.mail and contains the basics of
-    encoding an email.  You actually can do all your email processing with this
-    class, but it's more raw.
-    """
-    def __init__(self, items=()):
-        self.headers = dict(items)
-        self.parts = []
-        self.body = None
-        self.content_encoding = {'Content-Type': (None, {}),
-                                 'Content-Disposition': (None, {}),
-                                 'Content-Transfer-Encoding': (None, {})}
-
-    def __getitem__(self, key):
-        return self.headers.get(normalize_header(key), None)
-
-    def __len__(self):
-        return len(self.headers)
-
-    def __iter__(self):
-        return iter(self.headers)
-
-    def __contains__(self, key):
-        return normalize_header(key) in self.headers
-
-    def __setitem__(self, key, value):
-        self.headers[normalize_header(key)] = value
-
-    def __delitem__(self, key):
-        del self.headers[normalize_header(key)]
-
-    def __bool__(self):
-        return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0
-
-    def keys(self):
-        """Returns the sorted keys."""
-        return sorted(self.headers.keys())
-
-    def attach_file(self, filename, data, ctype, disposition):
-        """
-        A file attachment is a raw attachment with a disposition that
-        indicates the file name.
-        """
-        assert filename, "You can't attach a file without a filename."
-        ctype = ctype.lower()
-
-        part = MailBase()
-        part.body = data
-        part.content_encoding['Content-Type'] = (ctype, {'name': filename})
-        part.content_encoding['Content-Disposition'] = (disposition,
-                                                        {'filename': filename})
-        self.parts.append(part)
-
-    def attach_text(self, data, ctype):
-        """
-        This attaches a simpler text encoded part, which doesn't have a
-        filename.
-        """
-        ctype = ctype.lower()
-
-        part = MailBase()
-        part.body = data
-        part.content_encoding['Content-Type'] = (ctype, {})
-        self.parts.append(part)
-
-    def walk(self):
-        for p in self.parts:
-            yield p
-            for x in p.walk():
-                yield x
-
-
-class MailResponse(object):
-    """
-    You are given MailResponse objects from the lamson.view methods, and
-    whenever you want to generate an email to send to someone.  It has the
-    same basic functionality as MailRequest, but it is designed to be written
-    to, rather than read from (although you can do both).
-
-    You can easily set a Body or Html during creation or after by passing it
-    as __init__ parameters, or by setting those attributes.
-
-    You can initially set the From, To, and Subject, but they are headers so
-    use the dict notation to change them: msg['From'] = 'joe@example.com'.
-
-    The message is not fully crafted until right when you convert it with
-    MailResponse.to_message.  This lets you change it and work with it, then
-    send it out when it's ready.
-    """
-    def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None,
-                 separator="; "):
-        self.Body = Body
-        self.Html = Html
-        self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)])
-        self.multipart = self.Body and self.Html
-        self.attachments = []
-        self.separator = separator
-
-    def __contains__(self, key):
-        return self.base.__contains__(key)
-
-    def __getitem__(self, key):
-        return self.base.__getitem__(key)
-
-    def __setitem__(self, key, val):
-        return self.base.__setitem__(key, val)
-
-    def __delitem__(self, name):
-        del self.base[name]
-
-    def attach(self, filename=None, content_type=None, data=None,
-               disposition=None):
-        """
-
-        Simplifies attaching files from disk or data as files.  To attach
-        simple text simple give data and a content_type.  To attach a file,
-        give the data/content_type/filename/disposition combination.
-
-        For convenience, if you don't give data and only a filename, then it
-        will read that file's contents when you call to_message() later.  If
-        you give data and filename then it will assume you've filled data
-        with what the file's contents are and filename is just the name to
-        use.
-        """
-
-        assert filename or data, ("You must give a filename or some data to "
-                                  "attach.")
-        assert data or os.path.exists(filename), ("File doesn't exist, and no "
-                                                  "data given.")
-
-        self.multipart = True
-
-        if filename and not content_type:
-            content_type, encoding = mimetypes.guess_type(filename)
-
-        assert content_type, ("No content type given, and couldn't guess "
-                              "from the filename: %r" % filename)
-
-        self.attachments.append({'filename': filename,
-                                 'content_type': content_type,
-                                 'data': data,
-                                 'disposition': disposition})
-
-    def attach_part(self, part):
-        """
-        Attaches a raw MailBase part from a MailRequest (or anywhere)
-        so that you can copy it over.
-        """
-        self.multipart = True
-
-        self.attachments.append({'filename': None,
-                                 'content_type': None,
-                                 'data': None,
-                                 'disposition': None,
-                                 'part': part,
-                                 })
-
-    def attach_all_parts(self, mail_request):
-        """
-        Used for copying the attachment parts of a mail.MailRequest
-        object for mailing lists that need to maintain attachments.
-        """
-        for part in mail_request.all_parts():
-            self.attach_part(part)
-
-        self.base.content_encoding = mail_request.base.content_encoding.copy()
-
-    def clear(self):
-        """
-        Clears out the attachments so you can redo them.  Use this to keep the
-        headers for a series of different messages with different attachments.
-        """
-        del self.attachments[:]
-        del self.base.parts[:]
-        self.multipart = False
-
-    def update(self, message):
-        """
-        Used to easily set a bunch of heading from another dict
-        like object.
-        """
-        for k in message.keys():
-            self.base[k] = message[k]
-
-    def __str__(self):
-        """
-        Converts to a string.
-        """
-        return self.to_message().as_string()
-
-    def _encode_attachment(self, filename=None, content_type=None, data=None,
-                           disposition=None, part=None):
-        """
-        Used internally to take the attachments mentioned in self.attachments
-        and do the actual encoding in a lazy way when you call to_message.
-        """
-        if part:
-            self.base.parts.append(part)
-        elif filename:
-            if not data:
-                data = open(filename).read()
-
-            self.base.attach_file(filename, data, content_type,
-                                  disposition or 'attachment')
-        else:
-            self.base.attach_text(data, content_type)
-
-        ctype = self.base.content_encoding['Content-Type'][0]
-
-        if ctype and not ctype.startswith('multipart'):
-            self.base.content_encoding['Content-Type'] = ('multipart/mixed', {})
-
-    def to_message(self):
-        """
-        Figures out all the required steps to finally craft the
-        message you need and return it.  The resulting message
-        is also available as a self.base attribute.
-
-        What is returned is a Python email API message you can
-        use with those APIs.  The self.base attribute is the raw
-        lamson.encoding.MailBase.
-        """
-        del self.base.parts[:]
-
-        if self.Body and self.Html:
-            self.multipart = True
-            self.base.content_encoding['Content-Type'] = (
-                'multipart/alternative', {})
-
-        if self.multipart:
-            self.base.body = None
-            if self.Body:
-                self.base.attach_text(self.Body, 'text/plain')
-
-            if self.Html:
-                self.base.attach_text(self.Html, 'text/html')
-
-            for args in self.attachments:
-                self._encode_attachment(**args)
-
-        elif self.Body:
-            self.base.body = self.Body
-            self.base.content_encoding['Content-Type'] = ('text/plain', {})
-
-        elif self.Html:
-            self.base.body = self.Html
-            self.base.content_encoding['Content-Type'] = ('text/html', {})
-
-        return to_message(self.base, separator=self.separator)
-
-    def all_parts(self):
-        """
-        Returns all the encoded parts.  Only useful for debugging
-        or inspecting after calling to_message().
-        """
-        return self.base.parts
-
-    def keys(self):
-        return self.base.keys()
-
-
-def to_message(mail, separator="; "):
-    """
-    Given a MailBase message, this will construct a MIMEPart
-    that is canonicalized for use with the Python email API.
-    """
-    ctype, params = mail.content_encoding['Content-Type']
-
-    if not ctype:
-        if mail.parts:
-            ctype = 'multipart/mixed'
-        else:
-            ctype = 'text/plain'
-    else:
-        if mail.parts:
-            assert ctype.startswith(("multipart", "message")), \
-                   "Content type should be multipart or message, not %r" % ctype
-
-    # adjust the content type according to what it should be now
-    mail.content_encoding['Content-Type'] = (ctype, params)
-
-    try:
-        out = MIMEPart(ctype, **params)
-    except TypeError as e:  # pragma: no cover
-        raise EncodingError("Content-Type malformed, not allowed: %r; "
-                            "%r (Python ERROR: %s)" %
-                            (ctype, params, e.args[0]))
-
-    for k in mail.keys():
-        if k in ADDRESS_HEADERS_WHITELIST:
-            out[k] = header_to_mime_encoding(
-                                         mail[k],
-                                         not_email=False,
-                                         separator=separator
-                                     )
-        else:
-            out[k] = header_to_mime_encoding(
-                                         mail[k],
-                                         not_email=True
-                                    )
-
-    out.extract_payload(mail)
-
-    # go through the children
-    for part in mail.parts:
-        out.attach(to_message(part))
-
-    return out
-
-
-class MIMEPart(MIMEBase):
-    """
-    A reimplementation of nearly everything in email.mime to be more useful
-    for actually attaching things.  Rather than one class for every type of
-    thing you'd encode, there's just this one, and it figures out how to
-    encode what you ask it.
-    """
-    def __init__(self, type, **params):
-        self.maintype, self.subtype = type.split('/')
-        MIMEBase.__init__(self, self.maintype, self.subtype, **params)
-
-    def add_text(self, content):
-        # this is text, so encode it in canonical form
-        try:
-            encoded = content.encode('ascii')
-            charset = 'ascii'
-        except UnicodeError:
-            encoded = content.encode('utf-8')
-            charset = 'utf-8'
-
-        self.set_payload(encoded, charset=charset)
-
-    def extract_payload(self, mail):
-        if mail.body is None:
-            return  # only None, '' is still ok
-
-        ctype, _ctype_params = mail.content_encoding['Content-Type']
-        cdisp, cdisp_params = mail.content_encoding['Content-Disposition']
-
-        assert ctype, ("Extract payload requires that mail.content_encoding "
-                       "have a valid Content-Type.")
-
-        if ctype.startswith("text/"):
-            self.add_text(mail.body)
-        else:
-            if cdisp:
-                # replicate the content-disposition settings
-                self.add_header('Content-Disposition', cdisp, **cdisp_params)
-
-            self.set_payload(mail.body)
-            encoders.encode_base64(self)
-
-    def __repr__(self):
-        return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % (
-            self.subtype,
-            self.maintype,
-            self['Content-Type'],
-            self['Content-Disposition'],
-            self.is_multipart())
-
-
-def header_to_mime_encoding(value, not_email=False, separator=", "):
-    if not value:
-        return ""
-
-    encoder = Charset(DEFAULT_ENCODING)
-    if isinstance(value, list):
-        return separator.join(properly_encode_header(
-            v, encoder, not_email) for v in value)
-    else:
-        return properly_encode_header(value, encoder, not_email)
-
-
-def properly_encode_header(value, encoder, not_email):
-    """
-    The only thing special (weird) about this function is that it tries
-    to do a fast check to see if the header value has an email address in
-    it.  Since random headers could have an email address, and email addresses
-    have weird special formatting rules, we have to check for it.
-
-    Normally this works fine, but in Librelist, we need to "obfuscate" email
-    addresses by changing the '@' to '-AT-'.  This is where
-    VALUE_IS_EMAIL_ADDRESS exists.  It's a simple lambda returning True/False
-    to check if a header value has an email address.  If you need to make this
-    check different, then change this.
-    """
-    try:
-        value.encode("ascii")
-        return value
-    except UnicodeError:
-        if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
-            # this could have an email address, make sure we don't screw it up
-            name, address = parseaddr(value)
-            return '"%s" <%s>' % (encoder.header_encode(name), address)
-
-        return encoder.header_encode(value)
--- a/kallithea/lib/rcmail/smtp_mailer.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.rcmail.smtp_mailer
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Simple smtp mailer used in Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Sep 13, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import logging
-import smtplib
-import time
-from email.utils import formatdate
-from ssl import SSLError
-
-from kallithea.lib.rcmail.message import Message
-from kallithea.lib.rcmail.utils import DNS_NAME
-
-
-class SmtpMailer(object):
-    """SMTP mailer class
-
-    mailer = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth
-                        mail_port, ssl, tls)
-    mailer.send(recipients, subject, body, attachment_files)
-
-    :param recipients might be a list of string or single string
-    :param attachment_files is a dict of {filename:location}
-        it tries to guess the mimetype and attach the file
-
-    """
-
-    def __init__(self, mail_from, user, passwd, mail_server, smtp_auth=None,
-                 mail_port=None, ssl=False, tls=False, debug=False):
-
-        self.mail_from = mail_from
-        self.mail_server = mail_server
-        self.mail_port = mail_port
-        self.user = user
-        self.passwd = passwd
-        self.ssl = ssl
-        self.tls = tls
-        self.debug = debug
-        self.auth = smtp_auth
-
-    def send(self, recipients=None, subject='', body='', html='',
-             attachment_files=None, headers=None):
-        recipients = recipients or []
-        if isinstance(recipients, str):
-            recipients = [recipients]
-        if headers is None:
-            headers = {}
-        headers.setdefault('Date', formatdate(time.time()))
-        msg = Message(subject, recipients, body, html, self.mail_from,
-                      recipients_separator=", ", extra_headers=headers)
-        raw_msg = msg.to_message()
-
-        if self.ssl:
-            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port,
-                                         local_hostname=DNS_NAME.get_fqdn())
-        else:
-            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port,
-                                     local_hostname=DNS_NAME.get_fqdn())
-
-        if self.tls:
-            smtp_serv.ehlo()
-            smtp_serv.starttls()
-
-        if self.debug:
-            smtp_serv.set_debuglevel(1)
-
-        smtp_serv.ehlo()
-        if self.auth:
-            smtp_serv.esmtp_features["auth"] = self.auth
-
-        # if server requires authorization you must provide login and password
-        # but only if we have them
-        if self.user and self.passwd:
-            smtp_serv.login(self.user, self.passwd)
-
-        smtp_serv.sendmail(msg.sender, msg.send_to, raw_msg.as_string())
-        logging.info('MAIL SENT TO: %s' % recipients)
-
-        try:
-            smtp_serv.quit()
-        except SSLError:
-            # SSL error might sometimes be raised in tls connections on closing
-            smtp_serv.close()
--- a/kallithea/lib/rcmail/utils.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-"""
-Email message and email sending related helper functions.
-"""
-
-import socket
-
-
-# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
-# seconds, which slows down the restart of the server.
-class CachedDnsName(object):
-    def __str__(self):
-        return self.get_fqdn()
-
-    def get_fqdn(self):
-        if not hasattr(self, '_fqdn'):
-            self._fqdn = socket.getfqdn()
-        return self._fqdn
-
-
-DNS_NAME = CachedDnsName()
--- a/kallithea/lib/ssh.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/ssh.py	Thu May 27 21:27:37 2021 +0200
@@ -22,12 +22,14 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import base64
+import binascii
 import logging
 import re
+import struct
 
 from tg.i18n import ugettext as _
 
-from kallithea.lib.utils2 import ascii_bytes, ascii_str
+from kallithea.lib.utils2 import ascii_str
 
 
 log = logging.getLogger(__name__)
@@ -36,6 +38,14 @@
 class SshKeyParseError(Exception):
     """Exception raised by parse_pub_key"""
 
+algorithm_types = {  # mapping name to number of data strings in key
+    # https://tools.ietf.org/html/rfc4253#section-6.6
+    'ssh-rsa': 2,  # e, n
+    'ssh-dss': 4,  # p, q, g, y
+    # https://tools.ietf.org/html/rfc8709
+    'ssh-ed25519': 1,
+    'ssh-ed448': 1,
+}
 
 def parse_pub_key(ssh_key):
     r"""Parse SSH public key string, raise SshKeyParseError or return decoded keytype, data and comment
@@ -48,26 +58,38 @@
     >>> parse_pub_key('''AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
     >>> parse_pub_key('''abc AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it must start with key type 'ssh-rsa', 'ssh-dss', 'ssh-ed448', or 'ssh-ed25519'
     >>> parse_pub_key('''ssh-rsa  AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ' seems truncated (it can't be decoded)
     >>> parse_pub_key('''ssh-rsa  AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==''')
     Traceback (most recent call last):
     ...
-    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==' seems truncated (it contains a partial string length)
+    >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyE=''')
+    Traceback (most recent call last):
+    ...
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAANVGhpcyBpcyE=' seems truncated (it is too short for declared string length 13)
+    >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQ==''')
+    Traceback (most recent call last):
+    ...
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQ==' seems truncated (it contains too few strings for a ssh-rsa key)
+    >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU=''')
+    Traceback (most recent call last):
+    ...
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it is a ssh-rsa key but the base64 part contains 'csh-rsa'
     >>> parse_pub_key('''ssh-rsa  AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ''')
     Traceback (most recent call last):
     ...
-    kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
-    >>> parse_pub_key(''' ssh-rsa  AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment
+    kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
+    >>> parse_pub_key(''' ssh-rsa  AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU= and a comment
     ... ''')
-    ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
+    ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\rThis is fake!\x00\x00\x00\x03bye', 'and a comment\n')
     >>> parse_pub_key('''ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP1NA2kBQIKe74afUXmIWD9ByDYQJqUwW44Y4gJOBRuo''')
     ('ssh-ed25519', b'\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
     """
@@ -76,22 +98,40 @@
 
     parts = ssh_key.split(None, 2)
     if len(parts) < 2:
-        raise SshKeyParseError(_("Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='"))
+        raise SshKeyParseError(_("Invalid SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='"))
 
     keytype, keyvalue, comment = (parts + [''])[:3]
-    if keytype not in ('ssh-rsa', 'ssh-dss', 'ssh-ed25519'):
-        raise SshKeyParseError(_("Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"))
+    keytype_data_size = algorithm_types.get(keytype)
+    if keytype_data_size is None:
+        raise SshKeyParseError(_("Invalid SSH key - it must start with key type 'ssh-rsa', 'ssh-dss', 'ssh-ed448', or 'ssh-ed25519'"))
 
-    if re.search(r'[^a-zA-Z0-9+/=]', keyvalue):
-        raise SshKeyParseError(_("Incorrect SSH key - unexpected characters in base64 part %r") % keyvalue)
+    if re.search(r'[^a-zA-Z0-9+/=]', keyvalue):  # make sure b64decode doesn't stop at the first invalid character and skip the rest
+        raise SshKeyParseError(_("Invalid SSH key - unexpected characters in base64 part %r") % keyvalue)
 
     try:
         key_bytes = base64.b64decode(keyvalue)
-    except base64.binascii.Error:
-        raise SshKeyParseError(_("Incorrect SSH key - failed to decode base64 part %r") % keyvalue)
+    except binascii.Error:  # Must be caused by truncation - either "Invalid padding" or "Invalid base64-encoded string: number of data characters (x) cannot be 1 more than a multiple of 4"
+        raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it can't be decoded)") % keyvalue)
 
-    if not key_bytes.startswith(b'\x00\x00\x00%c%s\x00' % (len(keytype), ascii_bytes(keytype))):
-        raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (keytype, ascii_str(key_bytes[4:].split(b'\0', 1)[0])))
+    # Check key internals to make sure the key wasn't truncated in a way that base64 can decode:
+    # Parse and verify key according to https://tools.ietf.org/html/rfc4253#section-6.6
+    strings = []
+    offset = 0
+    while offset < len(key_bytes):
+        try:
+            string_length, = struct.unpack_from('!I', key_bytes, offset)
+        except struct.error:  # unpack_from requires a buffer of at least 283 bytes for unpacking 4 bytes at offset 279 (actual buffer size is 280)
+            raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it contains a partial string length)") % keyvalue)
+        offset += 4
+        string = key_bytes[offset:offset + string_length]
+        if len(string) != string_length:
+            raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it is too short for declared string length %s)") % (keyvalue, string_length))
+        strings.append(string)
+        offset += string_length
+    if len(strings) != keytype_data_size + 1:
+        raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it contains too few strings for a %s key)") % (keyvalue, keytype))
+    if ascii_str(strings[0]) != keytype:
+        raise SshKeyParseError(_("Invalid SSH key - it is a %s key but the base64 part contains %r") % (keytype, ascii_str(strings[0])))
 
     return keytype, key_bytes, comment
 
@@ -112,15 +152,16 @@
 
 
 def authorized_keys_line(kallithea_cli_path, config_file, key):
-    """
+    r"""
     Return a line as it would appear in .authorized_keys
 
-    >>> from kallithea.model.db import UserSshKeys, User
-    >>> user = User(user_id=7, username='uu')
-    >>> key = UserSshKeys(user_ssh_key_id=17, user=user, description='test key')
-    >>> key.public_key='''ssh-rsa  AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment'''
+    >>> getfixture('doctest_mock_ugettext')
+    >>> from kallithea.model import db
+    >>> user = db.User(user_id=7, username='uu')
+    >>> key = db.UserSshKeys(user_ssh_key_id=17, user=user, description='test key')
+    >>> key.public_key='''ssh-rsa  AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU= and a comment'''
     >>> authorized_keys_line('/srv/kallithea/venv/bin/kallithea-cli', '/srv/kallithea/my.ini', key)
-    'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==\\n'
+    'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU=\n'
     """
     try:
         keytype, key_bytes, comment = parse_pub_key(key.public_key)
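The added validation in parse_pub_key() walks the RFC 4253 "string" encoding inside the base64 part: each field is a 4-byte big-endian length followed by that many bytes, and the first string must repeat the key type. A minimal sketch of building such a blob, for example to craft test input like the doctests above (the ssh_string helper and all key values are made up for illustration):

    import base64
    import struct

    def ssh_string(data):
        # RFC 4253 "string": 4-byte big-endian length, then the raw bytes
        return struct.pack('!I', len(data)) + data

    # Fake ssh-rsa blob: the key type string plus the two data strings (e, n)
    # that algorithm_types expects for 'ssh-rsa'; the values are dummies.
    blob = ssh_string(b'ssh-rsa') + ssh_string(b'\x01\x00\x01') + ssh_string(b'\x00' * 16)
    print('ssh-rsa %s test@example' % base64.b64encode(blob).decode('ascii'))
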
--- a/kallithea/lib/timerproxy.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-import logging
-import time
-
-from sqlalchemy.interfaces import ConnectionProxy
-
-
-log = logging.getLogger('timerproxy')
-
-BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
-
-
-def color_sql(sql):
-    COLOR_SEQ = "\033[1;%dm"
-    COLOR_SQL = YELLOW
-    normal = '\x1b[0m'
-    return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
-
-
-class TimerProxy(ConnectionProxy):
-
-    def __init__(self):
-        super(TimerProxy, self).__init__()
-
-    def cursor_execute(self, execute, cursor, statement, parameters,
-                       context, executemany):
-
-        now = time.time()
-        try:
-            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
-            return execute(cursor, statement, parameters, context)
-        finally:
-            total = time.time() - now
-            log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
--- a/kallithea/lib/utils.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/utils.py	Thu May 27 21:27:37 2021 +0200
@@ -25,30 +25,27 @@
 :license: GPLv3, see LICENSE.md for more details.
 """
 
-import datetime
 import logging
 import os
 import re
-import sys
 import traceback
 import urllib.error
-from distutils.version import StrictVersion
 
 import mercurial.config
 import mercurial.error
 import mercurial.ui
 
-import kallithea.config.conf
+import kallithea.lib.conf
+from kallithea.lib import webutils
 from kallithea.lib.exceptions import InvalidCloneUriException
-from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str
+from kallithea.lib.utils2 import ascii_bytes, aslist, safe_bytes, safe_str
 from kallithea.lib.vcs.backends.git.repository import GitRepository
 from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
 from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
+from kallithea.lib.vcs.exceptions import VCSError
 from kallithea.lib.vcs.utils.fakemod import create_module
 from kallithea.lib.vcs.utils.helpers import get_scm
 from kallithea.model import db, meta
-from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog
 
 
 log = logging.getLogger(__name__)
@@ -75,7 +72,7 @@
 
 def get_user_group_slug(request):
     _group = request.environ['pylons.routes_dict'].get('id')
-    _group = UserGroup.get(_group)
+    _group = db.UserGroup.get(_group)
     if _group:
         return _group.users_group_name
     return None
@@ -105,66 +102,12 @@
         rest = '/' + rest_
     repo_id = _get_permanent_id(first)
     if repo_id is not None:
-        repo = Repository.get(repo_id)
+        repo = db.Repository.get(repo_id)
         if repo is not None:
             return repo.repo_name + rest
     return path
 
 
-def action_logger(user, action, repo, ipaddr='', commit=False):
-    """
-    Action logger for various actions made by users
-
-    :param user: user that made this action, can be a unique username string or
-        object containing user_id attribute
-    :param action: action to log, should be one of the predefined unique actions for
-        easy translations
-    :param repo: string name of repository or object containing repo_id,
-        that action was made on
-    :param ipaddr: optional IP address from what the action was made
-
-    """
-
-    # if we don't get explicit IP address try to get one from registered user
-    # in tmpl context var
-    if not ipaddr:
-        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
-
-    if getattr(user, 'user_id', None):
-        user_obj = User.get(user.user_id)
-    elif isinstance(user, str):
-        user_obj = User.get_by_username(user)
-    else:
-        raise Exception('You have to provide a user object or a username')
-
-    if getattr(repo, 'repo_id', None):
-        repo_obj = Repository.get(repo.repo_id)
-        repo_name = repo_obj.repo_name
-    elif isinstance(repo, str):
-        repo_name = repo.lstrip('/')
-        repo_obj = Repository.get_by_repo_name(repo_name)
-    else:
-        repo_obj = None
-        repo_name = ''
-
-    user_log = UserLog()
-    user_log.user_id = user_obj.user_id
-    user_log.username = user_obj.username
-    user_log.action = action
-
-    user_log.repository = repo_obj
-    user_log.repository_name = repo_name
-
-    user_log.action_date = datetime.datetime.now()
-    user_log.user_ip = ipaddr
-    meta.Session().add(user_log)
-
-    log.info('Logging action:%s on %s by user:%s ip:%s',
-             action, repo, user_obj, ipaddr)
-    if commit:
-        meta.Session().commit()
-
-
 def get_filesystem_repos(path):
     """
     Scans given path for repos and return (name,(type,path)) tuple
@@ -237,12 +180,6 @@
                 raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
             except mercurial.error.RepoError as e:
                 raise InvalidCloneUriException('Mercurial %s: %s' % (type(e).__name__, safe_str(bytes(e))))
-        elif url.startswith('svn+http'):
-            try:
-                from hgsubversion.svnrepo import svnremoterepo
-            except ImportError:
-                raise InvalidCloneUriException('URI type %s not supported - hgsubversion is not available' % (url,))
-            svnremoterepo(ui, url).svn.uuid
         elif url.startswith('git+http'):
             raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         else:
@@ -256,8 +193,6 @@
                 GitRepository._check_url(url)
             except urllib.error.URLError as e:
                 raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
-        elif url.startswith('svn+http'):
-            raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         elif url.startswith('hg+http'):
             raise InvalidCloneUriException('URI type %s not implemented' % (url,))
         else:
@@ -330,7 +265,7 @@
     baseui._tcfg = mercurial.config.config()
 
     sa = meta.Session()
-    for ui_ in sa.query(Ui).order_by(Ui.ui_section, Ui.ui_key):
+    for ui_ in sa.query(db.Ui).order_by(db.Ui.ui_section, db.Ui.ui_key):
         if ui_.ui_active:
             log.debug('config from db: [%s] %s=%r', ui_.ui_section,
                       ui_.ui_key, ui_.ui_value)
@@ -344,8 +279,12 @@
     ssh = baseui.config(b'ui', b'ssh', default=b'ssh')
     baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
     # push / pull hooks
-    baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action', b'python:kallithea.lib.hooks.log_push_action')
-    baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action', b'python:kallithea.lib.hooks.log_pull_action')
+    baseui.setconfig(b'hooks', b'changegroup.kallithea_push_action', b'python:kallithea.bin.vcs_hooks.push_action')
+    baseui.setconfig(b'hooks', b'outgoing.kallithea_pull_action', b'python:kallithea.bin.vcs_hooks.pull_action')
+    if baseui.config(b'hooks', ascii_bytes(db.Ui.HOOK_REPO_SIZE)):  # ignore actual value
+        baseui.setconfig(b'hooks', ascii_bytes(db.Ui.HOOK_REPO_SIZE), b'python:kallithea.bin.vcs_hooks.repo_size')
+    if baseui.config(b'hooks', ascii_bytes(db.Ui.HOOK_UPDATE)):  # ignore actual value
+        baseui.setconfig(b'hooks', ascii_bytes(db.Ui.HOOK_UPDATE), b'python:kallithea.bin.vcs_hooks.update')
 
     if repo_path is not None:
         # Note: MercurialRepository / mercurial.localrepo.instance will do this too, so it will always be possible to override db settings or what is hardcoded above
@@ -361,10 +300,10 @@
 
     :param config:
     """
-    hgsettings = Setting.get_app_settings()
-    for k, v in hgsettings.items():
+    settings = db.Setting.get_app_settings()
+    for k, v in settings.items():
         config[k] = v
-    config['base_path'] = Ui.get_repos_location()
+    config['base_path'] = db.Ui.get_repos_location()
 
 
 def set_vcs_config(config):
@@ -391,10 +330,10 @@
     :param config: kallithea.CONFIG
     """
     log.debug('adding extra into INDEX_EXTENSIONS')
-    kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
+    kallithea.lib.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
 
     log.debug('adding extra into INDEX_FILENAMES')
-    kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
+    kallithea.lib.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
 
 
 def map_groups(path):
@@ -407,17 +346,17 @@
     """
     from kallithea.model.repo_group import RepoGroupModel
     sa = meta.Session()
-    groups = path.split(db.URL_SEP)
+    groups = path.split(kallithea.URL_SEP)
     parent = None
     group = None
 
     # last element is repo in nested groups structure
     groups = groups[:-1]
     rgm = RepoGroupModel()
-    owner = User.get_first_admin()
+    owner = db.User.get_first_admin()
     for lvl, group_name in enumerate(groups):
         group_name = '/'.join(groups[:lvl] + [group_name])
-        group = RepoGroup.get_by_group_name(group_name)
+        group = db.RepoGroup.get_by_group_name(group_name)
         desc = '%s group' % group_name
 
         # skip folders that are now removed repos
@@ -427,7 +366,7 @@
         if group is None:
             log.debug('creating group level: %s group_name: %s',
                       lvl, group_name)
-            group = RepoGroup(group_name, parent)
+            group = db.RepoGroup(group_name, parent)
             group.group_description = desc
             group.owner = owner
             sa.add(group)
@@ -457,16 +396,16 @@
     sa = meta.Session()
     repo_model = RepoModel()
     if user is None:
-        user = User.get_first_admin()
+        user = db.User.get_first_admin()
     added = []
 
     # creation defaults
-    defs = Setting.get_default_repo_settings(strip_prefix=True)
+    defs = db.Setting.get_default_repo_settings(strip_prefix=True)
     enable_statistics = defs.get('repo_enable_statistics')
     enable_downloads = defs.get('repo_enable_downloads')
     private = defs.get('repo_private')
 
-    for name, repo in initial_repo_dict.items():
+    for name, repo in sorted(initial_repo_dict.items()):
         group = map_groups(name)
         db_repo = repo_model.get_by_repo_name(name)
         # found repo that is on filesystem not in Kallithea database
@@ -477,17 +416,22 @@
                     if repo.description != 'unknown'
                     else '%s repository' % name)
 
-            new_repo = repo_model._create_repo(
-                repo_name=name,
-                repo_type=repo.alias,
-                description=desc,
-                repo_group=getattr(group, 'group_id', None),
-                owner=user,
-                enable_downloads=enable_downloads,
-                enable_statistics=enable_statistics,
-                private=private,
-                state=Repository.STATE_CREATED
-            )
+            try:
+                new_repo = repo_model._create_repo(
+                    repo_name=name,
+                    repo_type=repo.alias,
+                    description=desc,
+                    repo_group=getattr(group, 'group_id', None),
+                    owner=user,
+                    enable_downloads=enable_downloads,
+                    enable_statistics=enable_statistics,
+                    private=private,
+                    state=db.Repository.STATE_CREATED
+                )
+            except Exception as e:
+                log.error('error creating %r: %s: %s', name, type(e).__name__, e)
+                sa.rollback()
+                continue
             sa.commit()
             # we added that repo just now, and make sure it has githook
             # installed, and updated server info
@@ -498,13 +442,13 @@
                 log.debug('Running update server info')
                 git_repo._update_server_info()
             new_repo.update_changeset_cache()
-        elif install_git_hooks:
+        elif install_git_hooks or overwrite_git_hooks:
             if db_repo.repo_type == 'git':
-                ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks)
+                ScmModel().install_git_hooks(db_repo.scm_instance, force=overwrite_git_hooks)
 
     removed = []
     # remove from database those repositories that are not in the filesystem
-    for repo in sa.query(Repository).all():
+    for repo in sa.query(db.Repository).all():
         if repo.repo_name not in initial_repo_dict:
             if remove_obsolete:
                 log.debug("Removing non-existing repository found in db `%s`",
@@ -520,84 +464,41 @@
     return added, removed
 
 
-def load_rcextensions(root_path):
-    path = os.path.join(root_path, 'rcextensions', '__init__.py')
-    if os.path.isfile(path):
-        rcext = create_module('rc', path)
-        EXT = kallithea.EXTENSIONS = rcext
-        log.debug('Found rcextensions now loading %s...', rcext)
-
-        # Additional mappings that are not present in the pygments lexers
-        kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
-
-        # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
+def load_extensions(root_path):
+    try:
+        ext = create_module('extensions', os.path.join(root_path, 'extensions.py'))
+    except FileNotFoundError:
+        try:
+            ext = create_module('rc', os.path.join(root_path, 'rcextensions', '__init__.py'))
+            log.warning('The name "rcextensions" is deprecated. Please use a file `extensions.py` instead of a directory `rcextensions`.')
+        except FileNotFoundError:
+            return
 
-        if getattr(EXT, 'INDEX_EXTENSIONS', []):
-            log.debug('settings custom INDEX_EXTENSIONS')
-            kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
+    log.info('Loaded Kallithea extensions from %s', ext)
+    kallithea.EXTENSIONS = ext
+
+    # Additional mappings that are not present in the pygments lexers
+    kallithea.lib.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(ext, 'EXTRA_MAPPINGS', {}))
 
-        # ADDITIONAL MAPPINGS
-        log.debug('adding extra into INDEX_EXTENSIONS')
-        kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
+    # Override any INDEX_EXTENSIONS
+    if getattr(ext, 'INDEX_EXTENSIONS', []):
+        log.debug('setting custom INDEX_EXTENSIONS')
+        kallithea.lib.conf.INDEX_EXTENSIONS = getattr(ext, 'INDEX_EXTENSIONS', [])
 
-        # auto check if the module is not missing any data, set to default if is
-        # this will help autoupdate new feature of rcext module
-        #from kallithea.config import rcextensions
-        #for k in dir(rcextensions):
-        #    if not k.startswith('_') and not hasattr(EXT, k):
-        #        setattr(EXT, k, getattr(rcextensions, k))
+    # Additional INDEX_EXTENSIONS
+    log.debug('adding EXTRA_INDEX_EXTENSIONS into INDEX_EXTENSIONS')
+    kallithea.lib.conf.INDEX_EXTENSIONS.extend(getattr(ext, 'EXTRA_INDEX_EXTENSIONS', []))
 
 
 #==============================================================================
 # MISC
 #==============================================================================
 
-git_req_ver = StrictVersion('1.7.4')
-
-def check_git_version():
-    """
-    Checks what version of git is installed on the system, and raise a system exit
-    if it's too old for Kallithea to work properly.
-    """
-    if 'git' not in kallithea.BACKENDS:
-        return None
-
-    if not settings.GIT_EXECUTABLE_PATH:
-        log.warning('No git executable configured - check "git_path" in the ini file.')
-        return None
-
-    try:
-        stdout, stderr = GitRepository._run_git_command(['--version'])
-    except RepositoryError as e:
-        # message will already have been logged as error
-        log.warning('No working git executable found - check "git_path" in the ini file.')
-        return None
-
-    if stderr:
-        log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr))
-
-    if not stdout:
-        log.warning('No working git executable found - check "git_path" in the ini file.')
-        return None
-
-    output = safe_str(stdout).strip()
-    m = re.search(r"\d+.\d+.\d+", output)
-    if m:
-        ver = StrictVersion(m.group(0))
-        log.debug('Git executable: "%s", version %s (parsed from: "%s")',
-                  settings.GIT_EXECUTABLE_PATH, ver, output)
-        if ver < git_req_ver:
-            log.error('Kallithea detected %s version %s, which is too old '
-                      'for the system to function properly. '
-                      'Please upgrade to version %s or later. '
-                      'If you strictly need Mercurial repositories, you can '
-                      'clear the "git_path" setting in the ini file.',
-                      settings.GIT_EXECUTABLE_PATH, ver, git_req_ver)
-            log.error("Terminating ...")
-            sys.exit(1)
-    else:
-        ver = StrictVersion('0.0.0')
-        log.warning('Error finding version number in "%s --version" stdout:\n%s',
-                    settings.GIT_EXECUTABLE_PATH, output)
-
-    return ver
+def extract_mentioned_users(text):
+    """ Returns set of actual database Users @mentioned in given text. """
+    result = set()
+    for name in webutils.extract_mentioned_usernames(text):
+        user = db.User.get_by_username(name, case_insensitive=True)
+        if user is not None and not user.is_default_user:
+            result.add(user)
+    return result
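
Illustrative sketch (not from the diff): the new load_extensions() above only looks for a handful of attributes on the loaded module, so a minimal site-local extensions.py could look like the following, with placeholder values:

    # extensions.py -- optional site customization picked up by load_extensions()

    # extra file-extension-to-lexer mappings, merged into
    # kallithea.lib.conf.LANGUAGES_EXTENSIONS_MAP
    EXTRA_MAPPINGS = {}

    # if non-empty, replaces kallithea.lib.conf.INDEX_EXTENSIONS entirely
    INDEX_EXTENSIONS = []

    # always appended to kallithea.lib.conf.INDEX_EXTENSIONS
    EXTRA_INDEX_EXTENSIONS = ['ini', 'cfg']
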
--- a/kallithea/lib/utils2.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/utils2.py	Thu May 27 21:27:37 2021 +0200
@@ -29,17 +29,31 @@
 
 import binascii
 import datetime
+import hashlib
 import json
+import logging
 import os
 import re
+import string
+import sys
 import time
 import urllib.parse
+from distutils.version import StrictVersion
 
+import bcrypt
 import urlobject
-from tg.i18n import ugettext as _
-from tg.i18n import ungettext
+from sqlalchemy.engine import url as sa_url
+from sqlalchemy.exc import ArgumentError
+from tg import tmpl_context
+from tg.support.converters import asbool, aslist
 from webhelpers2.text import collapse, remove_formatting, strip_tags
 
+import kallithea
+from kallithea.lib import webutils
+from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset
+from kallithea.lib.vcs.backends.git.repository import GitRepository
+from kallithea.lib.vcs.conf import settings
+from kallithea.lib.vcs.exceptions import RepositoryError
 from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str  # re-export
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 
@@ -50,7 +64,12 @@
     pass
 
 
+log = logging.getLogger(__name__)
+
+
 # mute pyflakes "imported but unused"
+assert asbool
+assert aslist
 assert ascii_bytes
 assert ascii_str
 assert safe_bytes
@@ -58,42 +77,9 @@
 assert LazyProperty
 
 
-def str2bool(_str):
-    """
-    returns True/False value from given string, it tries to translate the
-    string into boolean
-
-    :param _str: string value to translate into boolean
-    :rtype: boolean
-    :returns: boolean from given string
-    """
-    if _str is None:
-        return False
-    if _str in (True, False):
-        return _str
-    _str = str(_str).strip().lower()
-    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
-
-
-def aslist(obj, sep=None, strip=True):
-    """
-    Returns given string separated by sep as list
-
-    :param obj:
-    :param sep:
-    :param strip:
-    """
-    if isinstance(obj, (str)):
-        lst = obj.split(sep)
-        if strip:
-            lst = [v.strip() for v in lst]
-        return lst
-    elif isinstance(obj, (list, tuple)):
-        return obj
-    elif obj is None:
-        return []
-    else:
-        return [obj]
+# get current umask value without changing it
+umask = os.umask(0)
+os.umask(umask)
 
 
 def convert_line_endings(line, mode):
@@ -182,108 +168,6 @@
     return s
 
 
-def age(prevdate, show_short_version=False, now=None):
-    """
-    turns a datetime into an age string.
-    If show_short_version is True, then it will generate a not so accurate but shorter string,
-    example: 2days ago, instead of 2 days and 23 hours ago.
-
-    :param prevdate: datetime object
-    :param show_short_version: if it should approximate the date and return a shorter string
-    :rtype: str
-    :returns: str words describing age
-    """
-    now = now or datetime.datetime.now()
-    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
-    deltas = {}
-    future = False
-
-    if prevdate > now:
-        now, prevdate = prevdate, now
-        future = True
-    if future:
-        prevdate = prevdate.replace(microsecond=0)
-    # Get date parts deltas
-    from dateutil import relativedelta
-    for part in order:
-        d = relativedelta.relativedelta(now, prevdate)
-        deltas[part] = getattr(d, part + 's')
-
-    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
-    # not 1 hour, -59 minutes and -59 seconds)
-    for num, length in [(5, 60), (4, 60), (3, 24)]:  # seconds, minutes, hours
-        part = order[num]
-        carry_part = order[num - 1]
-
-        if deltas[part] < 0:
-            deltas[part] += length
-            deltas[carry_part] -= 1
-
-    # Same thing for days except that the increment depends on the (variable)
-    # number of days in the month
-    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
-    if deltas['day'] < 0:
-        if prevdate.month == 2 and (prevdate.year % 4 == 0 and
-            (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
-        ):
-            deltas['day'] += 29
-        else:
-            deltas['day'] += month_lengths[prevdate.month - 1]
-
-        deltas['month'] -= 1
-
-    if deltas['month'] < 0:
-        deltas['month'] += 12
-        deltas['year'] -= 1
-
-    # In short version, we want nicer handling of ages of more than a year
-    if show_short_version:
-        if deltas['year'] == 1:
-            # ages between 1 and 2 years: show as months
-            deltas['month'] += 12
-            deltas['year'] = 0
-        if deltas['year'] >= 2:
-            # ages 2+ years: round
-            if deltas['month'] > 6:
-                deltas['year'] += 1
-                deltas['month'] = 0
-
-    # Format the result
-    fmt_funcs = {
-        'year': lambda d: ungettext('%d year', '%d years', d) % d,
-        'month': lambda d: ungettext('%d month', '%d months', d) % d,
-        'day': lambda d: ungettext('%d day', '%d days', d) % d,
-        'hour': lambda d: ungettext('%d hour', '%d hours', d) % d,
-        'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d,
-        'second': lambda d: ungettext('%d second', '%d seconds', d) % d,
-    }
-
-    for i, part in enumerate(order):
-        value = deltas[part]
-        if value == 0:
-            continue
-
-        if i < 5:
-            sub_part = order[i + 1]
-            sub_value = deltas[sub_part]
-        else:
-            sub_value = 0
-
-        if sub_value == 0 or show_short_version:
-            if future:
-                return _('in %s') % fmt_funcs[part](value)
-            else:
-                return _('%s ago') % fmt_funcs[part](value)
-        if future:
-            return _('in %s and %s') % (fmt_funcs[part](value),
-                fmt_funcs[sub_part](sub_value))
-        else:
-            return _('%s and %s ago') % (fmt_funcs[part](value),
-                fmt_funcs[sub_part](sub_value))
-
-    return _('just now')
-
-
 def uri_filter(uri):
     """
     Removes user:password from given url string
@@ -358,6 +242,31 @@
     return str(url_obj)
 
 
+def short_ref_name(ref_type, ref_name):
+    """Return short description of PR ref - revs will be truncated"""
+    if ref_type == 'rev':
+        return ref_name[:12]
+    return ref_name
+
+
+def link_to_ref(repo_name, ref_type, ref_name, rev=None):
+    """
+    Return full markup for a PR ref, linking to changeset_home for the changeset.
+    If ref_type is 'branch', link to the changelog instead.
+    ref_name is shortened if ref_type is 'rev'.
+    If rev is specified, show it too, explicitly linking to that revision.
+    """
+    txt = short_ref_name(ref_type, ref_name)
+    if ref_type == 'branch':
+        u = webutils.url('changelog_home', repo_name=repo_name, branch=ref_name)
+    else:
+        u = webutils.url('changeset_home', repo_name=repo_name, revision=ref_name)
+    l = webutils.link_to(repo_name + '#' + txt, u)
+    if rev and ref_type != 'rev':
+        l = webutils.literal('%s (%s)' % (l, webutils.link_to(rev[:12], webutils.url('changeset_home', repo_name=repo_name, revision=rev))))
+    return l
+
+
 def get_changeset_safe(repo, rev):
     """
     Safe version of get_changeset if this changeset doesn't exists for a
@@ -366,9 +275,6 @@
     :param repo:
     :param rev:
     """
-    from kallithea.lib.vcs.backends.base import BaseRepository
-    from kallithea.lib.vcs.exceptions import RepositoryError
-    from kallithea.lib.vcs.backends.base import EmptyChangeset
     if not isinstance(repo, BaseRepository):
         raise Exception('You must pass an Repository '
                         'object as first argument got %s' % type(repo))
@@ -395,33 +301,6 @@
         return datetime.datetime.fromtimestamp(tm)
 
 
-# Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
-# Check char before @ - it must not look like we are in an email addresses.
-# Matching is greedy so we don't have to look beyond the end.
-MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
-
-
-def extract_mentioned_usernames(text):
-    r"""
-    Returns list of (possible) usernames @mentioned in given text.
-
-    >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
-    ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz']
-    """
-    return MENTIONS_REGEX.findall(text)
-
-
-def extract_mentioned_users(text):
-    """ Returns set of actual database Users @mentioned in given text. """
-    from kallithea.model.db import User
-    result = set()
-    for name in extract_mentioned_usernames(text):
-        user = User.get_by_username(name, case_insensitive=True)
-        if user is not None and not user.is_default_user:
-            result.add(user)
-    return result
-
-
 class AttributeDict(dict):
     def __getattr__(self, attr):
         return self.get(attr, None)
@@ -430,8 +309,6 @@
 
 
 def obfuscate_url_pw(engine):
-    from sqlalchemy.engine import url as sa_url
-    from sqlalchemy.exc import ArgumentError
     try:
         _url = sa_url.make_url(engine or '')
     except ArgumentError:
@@ -478,14 +355,13 @@
 
     Must always be called before anything with hooks are invoked.
     """
-    from kallithea import CONFIG
     extras = {
-        'ip': ip_addr, # used in log_push/pull_action action_logger
+        'ip': ip_addr, # used in action_logger
         'username': username,
-        'action': action or 'push_local', # used in log_push_action_raw_ids action_logger
+        'action': action or 'push_local', # used in process_pushed_raw_ids action_logger
         'repository': repo_name,
-        'scm': repo_alias, # used to pick hack in log_push_action_raw_ids
-        'config': CONFIG['__file__'], # used by git hook to read config
+        'scm': repo_alias,
+        'config': kallithea.CONFIG['__file__'], # used by git hook to read config
     }
     os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras)
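
Illustrative sketch (not from the diff): the process on the hook side can decode the same JSON blob back from the environment; the snippet and its variable names are hypothetical:

    import json
    import os

    # keys mirror the extras dict built by set_hook_environment() above
    extras = json.loads(os.environ['KALLITHEA_EXTRAS'])
    print('%(username)s: %(action)s on %(repository)s from %(ip)s' % extras)
    ini_path = extras['config']  # ini file path, as used by the git hook
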
 
@@ -495,82 +371,12 @@
     Gets kallithea user from threadlocal tmpl_context variable if it's
     defined, else returns None.
     """
-    from tg import tmpl_context
     try:
         return getattr(tmpl_context, 'authuser', None)
     except TypeError:  # No object (name: context) has been registered for this thread
         return None
 
 
-class OptionalAttr(object):
-    """
-    Special Optional Option that defines other attribute. Example::
-
-        def test(apiuser, userid=Optional(OAttr('apiuser')):
-            user = Optional.extract(userid)
-            # calls
-
-    """
-
-    def __init__(self, attr_name):
-        self.attr_name = attr_name
-
-    def __repr__(self):
-        return '<OptionalAttr:%s>' % self.attr_name
-
-    def __call__(self):
-        return self
-
-
-# alias
-OAttr = OptionalAttr
-
-
-class Optional(object):
-    """
-    Defines an optional parameter::
-
-        param = param.getval() if isinstance(param, Optional) else param
-        param = param() if isinstance(param, Optional) else param
-
-    is equivalent of::
-
-        param = Optional.extract(param)
-
-    """
-
-    def __init__(self, type_):
-        self.type_ = type_
-
-    def __repr__(self):
-        return '<Optional:%s>' % self.type_.__repr__()
-
-    def __call__(self):
-        return self.getval()
-
-    def getval(self):
-        """
-        returns value from this Optional instance
-        """
-        if isinstance(self.type_, OAttr):
-            # use params name
-            return self.type_.attr_name
-        return self.type_
-
-    @classmethod
-    def extract(cls, val):
-        """
-        Extracts value from Optional() instance
-
-        :param val:
-        :return: original value if it's not Optional instance else
-            value of instance
-        """
-        if isinstance(val, cls):
-            return val.getval()
-        return val
-
-
 def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
     return _cleanstringsub('_', s).rstrip('_')
 
@@ -622,3 +428,118 @@
         if retries < 0:
             raise IOError
         print(complaint)
+
+
+class PasswordGenerator(object):
+    """
+    A simple class for generating passwords from different sets of
+    characters.
+    Usage::
+
+        passwd_gen = PasswordGenerator()
+        # generate an 8-letter password containing only upper- and
+        # lower-case letters of the alphabet
+        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
+    """
+    ALPHABETS_NUM = r'''1234567890'''
+    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
+    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
+    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
+    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
+        + ALPHABETS_NUM + ALPHABETS_SPECIAL
+    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
+    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
+    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
+    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
+
+    def gen_password(self, length, alphabet=ALPHABETS_FULL):
+        assert len(alphabet) <= 256, alphabet
+        l = []
+        while len(l) < length:
+            i = ord(os.urandom(1))
+            if i < len(alphabet):
+                l.append(alphabet[i])
+        return ''.join(l)
+
+
+def get_crypt_password(password):
+    """
+    Cryptographic function used for bcrypt password hashing.
+
+    :param password: password to hash
+    """
+    return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10)))
+
+
+def check_password(password, hashed):
+    """
+    Checks whether the password matches the hashed value using bcrypt.
+    Remains backwards compatible and accepts plain sha256 hashes, which used to
+    be used on Windows.
+
+    :param password: password
+    :param hashed: password in hashed form
+    """
+    # sha256 hashes will always be 64 hex chars
+    # bcrypt hashes will always contain $ (and be shorter)
+    if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
+        return hashlib.sha256(password).hexdigest() == hashed
+    try:
+        return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
+    except ValueError as e:
+        # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
+        log.error('error from bcrypt checking password: %s', e)
+        return False
+    log.error('check_password failed - no method found for hash length %s', len(hashed))
+    return False
+
+
+git_req_ver = StrictVersion('1.7.4')
+
+def check_git_version():
+    """
+    Checks which version of git is installed on the system, and raises a system
+    exit if it's too old for Kallithea to work properly.
+    """
+    if 'git' not in kallithea.BACKENDS:
+        return None
+
+    if not settings.GIT_EXECUTABLE_PATH:
+        log.warning('No git executable configured - check "git_path" in the ini file.')
+        return None
+
+    try:
+        stdout, stderr = GitRepository._run_git_command(['--version'])
+    except RepositoryError as e:
+        # message will already have been logged as error
+        log.warning('No working git executable found - check "git_path" in the ini file.')
+        return None
+
+    if stderr:
+        log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr))
+
+    if not stdout:
+        log.warning('No working git executable found - check "git_path" in the ini file.')
+        return None
+
+    output = safe_str(stdout).strip()
+    m = re.search(r"\d+.\d+.\d+", output)
+    if m:
+        ver = StrictVersion(m.group(0))
+        log.debug('Git executable: "%s", version %s (parsed from: "%s")',
+                  settings.GIT_EXECUTABLE_PATH, ver, output)
+        if ver < git_req_ver:
+            log.error('Kallithea detected %s version %s, which is too old '
+                      'for the system to function properly. '
+                      'Please upgrade to version %s or later. '
+                      'If you strictly need Mercurial repositories, you can '
+                      'clear the "git_path" setting in the ini file.',
+                      settings.GIT_EXECUTABLE_PATH, ver, git_req_ver)
+            log.error("Terminating ...")
+            sys.exit(1)
+    else:
+        ver = StrictVersion('0.0.0')
+        log.warning('Error finding version number in "%s --version" stdout:\n%s',
+                    settings.GIT_EXECUTABLE_PATH, output)
+
+    return ver
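
Illustrative sketch (not from the diff) of how the relocated password helpers fit together, assuming the bcrypt package is available as Kallithea requires:

    from kallithea.lib.utils2 import PasswordGenerator, check_password, get_crypt_password

    pw = PasswordGenerator().gen_password(12, PasswordGenerator.ALPHABETS_ALPHANUM)
    hashed = get_crypt_password(pw)      # bcrypt hash, returned as str
    assert check_password(pw, hashed)
    assert not check_password(pw + 'x', hashed)
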
--- a/kallithea/lib/vcs/backends/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -17,7 +17,7 @@
 from kallithea.lib.vcs.utils.paths import abspath
 
 
-def get_repo(path=None, alias=None, create=False):
+def get_repo(path=None, alias=None, create=False, baseui=None):
     """
     Returns ``Repository`` object of type linked with given ``alias`` at
     the specified ``path``. If ``alias`` is not given it will try to guess it
@@ -39,7 +39,7 @@
         alias = get_scm(path)[0]
 
     backend = get_backend(alias)
-    repo = backend(path, create=create)
+    repo = backend(path, create=create, baseui=baseui)
     return repo
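
Illustrative sketch (not from the diff) of the extended get_repo() signature; the paths are placeholders, and baseui is simply ignored by the git backend (see GitRepository.__init__ below):

    from kallithea.lib.vcs.backends import get_repo

    repo = get_repo('/srv/repos/example')                # alias guessed from disk
    hg_repo = get_repo('/srv/repos/example-hg', alias='hg', baseui=None)
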
 
 
--- a/kallithea/lib/vcs/backends/base.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/base.py	Thu May 27 21:27:37 2021 +0200
@@ -11,7 +11,9 @@
 
 import datetime
 import itertools
+from typing import Sequence
 
+from kallithea.lib.vcs.backends import get_backend
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import (ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError,
                                           NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
@@ -27,7 +29,7 @@
     **Attributes**
 
         ``DEFAULT_BRANCH_NAME``
-            name of default branch (i.e. "trunk" for svn, "master" for git etc.
+            name of default branch (i.e. "master" for git etc.)
 
         ``scm``
             alias of scm, i.e. *git* or *hg*
@@ -50,8 +52,12 @@
         ``tags``
             tags as list of changesets
     """
-    scm = None
-    DEFAULT_BRANCH_NAME = None
+    DEFAULT_BRANCH_NAME: str  # assigned in subclass
+    scm: str  # assigned in subclass
+    path: str  # assigned in subclass __init__
+    revisions: Sequence[str]  # LazyProperty in subclass
+    _empty: bool  # property in subclass
+
     EMPTY_CHANGESET = '0' * 40
 
     def __init__(self, repo_path, create=False, **kwargs):
@@ -169,6 +175,20 @@
         """
         raise NotImplementedError
 
+    def get_diff_changesets(self, org_rev, other_repo, other_rev):
+        """
+        Returns lists of changesets that can be merged from this repo @org_rev
+        to other_repo @other_rev
+        ... and the other way
+        ... and the ancestors that would be used for a merge
+
+        :param org_rev: the revision in this repo that the comparison is made from
+        :param other_repo: repo object, most likely the fork of org_repo. It has
+            all the changesets that we need to obtain
+        :param other_rev: the revision in other_repo that the comparison is made against
+        """
+        raise NotImplementedError
+
     def __getitem__(self, key):
         if isinstance(key, slice):
             return (self.get_changeset(rev) for rev in self.revisions[key])
@@ -324,8 +344,7 @@
 
         ``short_id``
             shortened (if apply) version of ``raw_id``; it would be simple
-            shortcut for ``raw_id[:12]`` for git/mercurial backends or same
-            as ``raw_id`` for subversion
+            shortcut for ``raw_id[:12]`` for git/mercurial backends
 
         ``revision``
             revision number as integer
@@ -353,6 +372,9 @@
             otherwise; trying to access this attribute while there is no
             changesets would raise ``EmptyRepositoryError``
     """
+    message: str  # LazyProperty in subclass
+    date: datetime.datetime  # LazyProperty in subclass
+
     def __str__(self):
         return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
             self.short_id)
@@ -1008,13 +1030,11 @@
 
     @LazyProperty
     def branch(self):
-        from kallithea.lib.vcs.backends import get_backend
         return get_backend(self.alias).DEFAULT_BRANCH_NAME
 
     @LazyProperty
     def branches(self):
-        from kallithea.lib.vcs.backends import get_backend
-        return [get_backend(self.alias).DEFAULT_BRANCH_NAME]
+        return [self.branch]
 
     @LazyProperty
     def short_id(self):
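
Illustrative sketch (not from the diff): the new class-level annotations on BaseRepository are declarations only; a backend is expected to provide the values, roughly like this reduced, hypothetical stub:

    from typing import Sequence

    from kallithea.lib.vcs.backends.base import BaseRepository

    class DemoRepository(BaseRepository):
        """Hypothetical stub showing how subclasses fill in the annotated attributes."""
        scm = 'demo'
        DEFAULT_BRANCH_NAME = 'default'

        def __init__(self, repo_path, create=False, **kwargs):
            self.path = repo_path

        @property
        def revisions(self) -> Sequence[str]:
            return []   # a real backend returns its revision ids here

        @property
        def _empty(self) -> bool:
            return not self.revisions
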
--- a/kallithea/lib/vcs/backends/git/changeset.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/git/changeset.py	Thu May 27 21:27:37 2021 +0200
@@ -5,6 +5,7 @@
 
 from dulwich import objects
 from dulwich.config import ConfigFile
+from dulwich.walk import Walker
 
 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 from kallithea.lib.vcs.conf import settings
@@ -294,7 +295,6 @@
 
         """
         self._get_filectx(path)
-        from dulwich.walk import Walker
         include = [self.raw_id]
         walker = Walker(self.repository._repo.object_store, include,
                         paths=[path], max_entries=1)
--- a/kallithea/lib/vcs/backends/git/inmemory.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/git/inmemory.py	Thu May 27 21:27:37 2021 +0200
@@ -9,6 +9,8 @@
 from kallithea.lib.vcs.exceptions import RepositoryError
 from kallithea.lib.vcs.utils import ascii_str, safe_bytes
 
+from . import repository
+
 
 class GitInMemoryChangeset(BaseInMemoryChangeset):
 
@@ -32,9 +34,8 @@
         """
         self.check_integrity(parents)
 
-        from .repository import GitRepository
         if branch is None:
-            branch = GitRepository.DEFAULT_BRANCH_NAME
+            branch = repository.GitRepository.DEFAULT_BRANCH_NAME
 
         repo = self.repository._repo
         object_store = repo.object_store
--- a/kallithea/lib/vcs/backends/git/repository.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/git/repository.py	Thu May 27 21:27:37 2021 +0200
@@ -20,23 +20,23 @@
 from collections import OrderedDict
 
 import mercurial.util  # import url as hg_url
+from dulwich.client import SubprocessGitClient
 from dulwich.config import ConfigFile
 from dulwich.objects import Tag
 from dulwich.repo import NotGitRepository, Repo
+from dulwich.server import update_server_info
 
 from kallithea.lib.vcs import subprocessio
 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
                                           TagDoesNotExistError)
-from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 
-from .changeset import GitChangeset
-from .inmemory import GitInMemoryChangeset
-from .workdir import GitWorkdir
+from . import changeset, inmemory, workdir
 
 
 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
@@ -52,8 +52,8 @@
     scm = 'git'
 
     def __init__(self, repo_path, create=False, src_url=None,
-                 update_after_clone=False, bare=False):
-
+                 update_after_clone=False, bare=False, baseui=None):
+        baseui  # unused
         self.path = abspath(repo_path)
         self.repo = self._get_repo(create, src_url, update_after_clone, bare)
         self.bare = self.repo.bare
@@ -147,38 +147,84 @@
         stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
         return safe_str(stdout)
 
-    @classmethod
-    def _check_url(cls, url):
+    @staticmethod
+    def _check_url(url):
+        r"""
+        Raise URLError if url doesn't seem like a valid safe Git URL. We
+        only allow http, https, git, and ssh URLs.
+
+        For http and https URLs, make a connection and probe to see if it is valid.
+
+        >>> GitRepository._check_url('git://example.com/my%20fine repo')
+
+        >>> GitRepository._check_url('http://example.com:65537/repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Error parsing URL: 'http://example.com:65537/repo'>
+        >>> GitRepository._check_url('foo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'foo'>
+        >>> GitRepository._check_url('file:///repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'file:///repo'>
+        >>> GitRepository._check_url('git+http://example.com/repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'git+http://example.com/repo'>
+        >>> GitRepository._check_url('git://example.com/%09')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Invalid escape character in path: '%'>
+        >>> GitRepository._check_url('git://example.com/%x00')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Invalid escape character in path: '%'>
+        >>> GitRepository._check_url(r'git://example.com/\u0009')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Invalid escape character in path: '\'>
+        >>> GitRepository._check_url(r'git://example.com/\t')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Invalid escape character in path: '\'>
+        >>> GitRepository._check_url('git://example.com/\t')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Invalid ...>
+
+        The failure above will be one of the following, depending on the level of WhatWG support:
+        urllib.error.URLError: <urlopen error Invalid whitespace character in path: '\t'>
+        urllib.error.URLError: <urlopen error Invalid url: 'git://example.com/    ' normalizes to 'git://example.com/'>
         """
-        Function will check given url and try to verify if it's a valid
-        link. Sometimes it may happened that git will issue basic
-        auth request that can cause whole API to hang when used from python
-        or other external calls.
+        try:
+            parsed_url = urllib.parse.urlparse(url)
+            parsed_url.port  # trigger netloc parsing which might raise ValueError
+        except ValueError:
+            raise urllib.error.URLError("Error parsing URL: %r" % url)
 
-        On failures it'll raise urllib2.HTTPError, exception is also thrown
-        when the return code is non 200
-        """
         # check first if it's not an local url
         if os.path.isabs(url) and os.path.isdir(url):
-            return True
+            return
 
-        if url.startswith('git://'):
-            try:
-                _git_colon, _empty, _host, path = url.split('/', 3)
-            except ValueError:
-                raise urllib.error.URLError("Invalid URL: %r" % url)
+        unparsed_url = urllib.parse.urlunparse(parsed_url)
+        if unparsed_url != url:
+            raise urllib.error.URLError("Invalid url: '%s' normalizes to '%s'" % (url, unparsed_url))
+
+        if parsed_url.scheme == 'git':
             # Mitigate problems elsewhere with incorrect handling of encoded paths.
             # Don't trust urllib.parse.unquote but be prepared for more flexible implementations elsewhere.
             # Space is the only allowed whitespace character - directly or % encoded. No other % or \ is allowed.
-            for c in path.replace('%20', ' '):
+            for c in parsed_url.path.replace('%20', ' '):
                 if c in '%\\':
                     raise urllib.error.URLError("Invalid escape character in path: '%s'" % c)
                 if c.isspace() and c != ' ':
                     raise urllib.error.URLError("Invalid whitespace character in path: %r" % c)
-            return True
+            return
 
-        if not url.startswith('http://') and not url.startswith('https://'):
-            raise urllib.error.URLError("Unsupported protocol in URL %s" % url)
+        if parsed_url.scheme not in ['http', 'https']:
+            raise urllib.error.URLError("Unsupported protocol in URL %r" % url)
 
         url_obj = mercurial.util.url(safe_bytes(url))
         test_uri, handlers = get_urllib_request_handlers(url_obj)
@@ -211,8 +257,6 @@
             raise urllib.error.URLError(
                 "url [%s] does not look like an git" % cleaned_uri)
 
-        return True
-
     def _get_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
         if create and os.path.exists(self.path):
@@ -352,11 +396,6 @@
     def description(self):
         return safe_str(self._repo.get_description() or b'unknown')
 
-    @LazyProperty
-    def contact(self):
-        undefined_contact = 'Unknown'
-        return undefined_contact
-
     @property
     def branches(self):
         if not self.revisions:
@@ -477,9 +516,9 @@
         Returns ``GitChangeset`` object representing commit from git repository
         at the given revision or head (most recent commit) if None given.
         """
-        if isinstance(revision, GitChangeset):
+        if isinstance(revision, changeset.GitChangeset):
             return revision
-        return GitChangeset(repository=self, revision=self._get_revision(revision))
+        return changeset.GitChangeset(repository=self, revision=self._get_revision(revision))
 
     def get_changesets(self, start=None, end=None, start_date=None,
            end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -555,6 +594,58 @@
 
         return CollectionGenerator(self, revs)
 
+    def get_diff_changesets(self, org_rev, other_repo, other_rev):
+        """
+        Returns lists of changesets that can be merged from this repo @org_rev
+        to other_repo @other_rev
+        ... and the other way
+        ... and the ancestors that would be used for a merge
+
+        :param org_rev: the revision in this repo that the comparison is made from
+        :param other_repo: repo object, most likely the fork of org_repo. It has
+            all the changesets that we need to obtain
+        :param other_rev: the revision in other_repo that the comparison is made against
+        """
+        org_changesets = []
+        ancestors = None
+        if org_rev == other_rev:
+            other_changesets = []
+        elif self != other_repo:
+            gitrepo = Repo(self.path)
+            SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
+
+            gitrepo_remote = Repo(other_repo.path)
+            SubprocessGitClient(thin_packs=False).fetch(self.path, gitrepo_remote)
+
+            revs = [
+                ascii_str(x.commit.id)
+                for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
+                                                   exclude=[ascii_bytes(org_rev)])
+            ]
+            other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
+            if other_changesets:
+                ancestors = [other_changesets[0].parents[0].raw_id]
+            else:
+                # no changesets from other repo, ancestor is the other_rev
+                ancestors = [other_rev]
+
+            gitrepo.close()
+            gitrepo_remote.close()
+
+        else:
+            so = self.run_git_command(
+                ['log', '--reverse', '--pretty=format:%H',
+                 '-s', '%s..%s' % (org_rev, other_rev)]
+            )
+            other_changesets = [self.get_changeset(cs)
+                          for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
+            so = self.run_git_command(
+                ['merge-base', org_rev, other_rev]
+            )
+            ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
+
+        return other_changesets, org_changesets, ancestors
+
     def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
                  context=3):
         """
@@ -627,7 +718,7 @@
         """
         Returns ``GitInMemoryChangeset`` object for this repository.
         """
-        return GitInMemoryChangeset(self)
+        return inmemory.GitInMemoryChangeset(self)
 
     def clone(self, url, update_after_clone=True, bare=False):
         """
@@ -664,7 +755,7 @@
         url = self._get_url(url)
         so = self.run_git_command(['ls-remote', '-h', url])
         cmd = ['fetch', url, '--']
-        for line in (x for x in so.splitlines()):
+        for line in so.splitlines():
             sha, ref = line.split('\t')
             cmd.append('+%s:%s' % (ref, ref))
         self.run_git_command(cmd)
@@ -673,7 +764,6 @@
         """
         runs gits update-server-info command in this repo instance
         """
-        from dulwich.server import update_server_info
         try:
             update_server_info(self._repo)
         except OSError as e:
@@ -687,7 +777,7 @@
         """
         Returns ``Workdir`` instance for this repository.
         """
-        return GitWorkdir(self)
+        return workdir.GitWorkdir(self)
 
     def get_config_value(self, section, name, config_file=None):
         """
--- a/kallithea/lib/vcs/backends/git/ssh.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,82 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-import logging
-import os
-
-from kallithea.lib.hooks import log_pull_action
-from kallithea.lib.utils import make_ui
-from kallithea.lib.vcs.backends.ssh import BaseSshHandler
-
-
-log = logging.getLogger(__name__)
-
-
-class GitSshHandler(BaseSshHandler):
-    vcs_type = 'git'
-
-    @classmethod
-    def make(cls, ssh_command_parts):
-        r"""
-        >>> import shlex
-
-        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).repo_name
-        'foo bar'
-        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).verb
-        'git-upload-pack'
-        >>> GitSshHandler.make(shlex.split(" git-upload-pack /blåbærgrød ")).repo_name # might not be necessary to support no quoting ... but we can
-        'bl\xe5b\xe6rgr\xf8d'
-        >>> GitSshHandler.make(shlex.split('''git-upload-pack "/foo'bar"''')).repo_name
-        "foo'bar"
-        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).repo_name
-        'foo'
-        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).verb
-        'git-receive-pack'
-
-        >>> GitSshHandler.make(shlex.split("/bin/git-upload-pack '/foo'")) # ssh-serve will report 'SSH command %r is not supported'
-        >>> GitSshHandler.make(shlex.split('''git-upload-pack /foo bar''')) # ssh-serve will report 'SSH command %r is not supported'
-        >>> shlex.split("git-upload-pack '/foo'bar' x") # ssh-serve will report: Error parsing SSH command "...": No closing quotation
-        Traceback (most recent call last):
-        ValueError: No closing quotation
-        >>> GitSshHandler.make(shlex.split('hg -R foo serve --stdio')) # not handled here
-        """
-        if (len(ssh_command_parts) == 2 and
-            ssh_command_parts[0] in ['git-upload-pack', 'git-receive-pack'] and
-            ssh_command_parts[1].startswith('/')
-        ):
-            return cls(ssh_command_parts[1][1:], ssh_command_parts[0])
-
-        return None
-
-    def __init__(self, repo_name, verb):
-        BaseSshHandler.__init__(self, repo_name)
-        self.verb = verb
-
-    def _serve(self):
-        if self.verb == 'git-upload-pack': # action 'pull'
-            # base class called set_hook_environment - action is hardcoded to 'pull'
-            log_pull_action(ui=make_ui(), repo=self.db_repo.scm_instance._repo)
-        else: # probably verb 'git-receive-pack', action 'push'
-            if not self.allow_push:
-                self.exit('Push access to %r denied' % self.repo_name)
-            # Note: push logging is handled by Git post-receive hook
-
-        # git shell is not a real shell but use shell inspired quoting *inside* the argument.
-        # Per https://github.com/git/git/blob/v2.22.0/quote.c#L12 :
-        # The path must be "'" quoted, but "'" and "!" must exit the quoting and be "\" escaped
-        quoted_abspath = "'%s'" % self.db_repo.repo_full_path.replace("'", r"'\''").replace("!", r"'\!'")
-        newcmd = ['git', 'shell', '-c', "%s %s" % (self.verb, quoted_abspath)]
-        log.debug('Serving: %s', newcmd)
-        os.execvp(newcmd[0], newcmd)
-        self.exit("Failed to exec 'git' as %s" % newcmd)
--- a/kallithea/lib/vcs/backends/git/workdir.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/git/workdir.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,8 @@
 import re
 
-from kallithea.lib.utils2 import ascii_str, safe_str
 from kallithea.lib.vcs.backends.base import BaseWorkdir
 from kallithea.lib.vcs.exceptions import BranchDoesNotExistError, RepositoryError
+from kallithea.lib.vcs.utils import ascii_str, safe_str
 
 
 class GitWorkdir(BaseWorkdir):
--- a/kallithea/lib/vcs/backends/hg/inmemory.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/hg/inmemory.py	Thu May 27 21:27:37 2021 +0200
@@ -7,6 +7,8 @@
 from kallithea.lib.vcs.exceptions import RepositoryError
 from kallithea.lib.vcs.utils import ascii_str, safe_bytes, safe_str
 
+from . import repository
+
 
 class MercurialInMemoryChangeset(BaseInMemoryChangeset):
 
@@ -35,9 +37,8 @@
         if not isinstance(author, str):
             raise RepositoryError('author must be a str - got %r' % type(author))
 
-        from .repository import MercurialRepository
         if branch is None:
-            branch = MercurialRepository.DEFAULT_BRANCH_NAME
+            branch = repository.MercurialRepository.DEFAULT_BRANCH_NAME
         kwargs[b'branch'] = safe_bytes(branch)
 
         def filectxfn(_repo, memctx, bytes_path):
--- a/kallithea/lib/vcs/backends/hg/repository.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/hg/repository.py	Thu May 27 21:27:37 2021 +0200
@@ -33,19 +33,18 @@
 import mercurial.sshpeer
 import mercurial.tags
 import mercurial.ui
+import mercurial.unionrepo
 import mercurial.util
 
 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
                                           TagDoesNotExistError, VCSError)
-from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import abspath
 
-from .changeset import MercurialChangeset
-from .inmemory import MercurialInMemoryChangeset
-from .workdir import MercurialWorkdir
+from . import changeset, inmemory, workdir
 
 
 log = logging.getLogger(__name__)
@@ -272,7 +271,7 @@
             self.get_changeset(rev1)
         self.get_changeset(rev2)
         if path:
-            file_filter = mercurial.match.exact(path)
+            file_filter = mercurial.match.exact([safe_bytes(path)])
         else:
             file_filter = None
 
@@ -282,31 +281,60 @@
                                         ignorews=ignore_whitespace,
                                         context=context)))
 
-    @classmethod
-    def _check_url(cls, url, repoui=None):
-        """
-        Function will check given url and try to verify if it's a valid
-        link. Sometimes it may happened that mercurial will issue basic
-        auth request that can cause whole API to hang when used from python
-        or other external calls.
+    @staticmethod
+    def _check_url(url, repoui=None):
+        r"""
+        Raise URLError if url doesn't seem like a valid safe Hg URL. We
+        only allow http, https, ssh, and hg-git URLs.
+
+        For http, https and git URLs, make a connection and probe to see if it is valid.
 
         On failures it'll raise urllib2.HTTPError, exception is also thrown
         when the return code is non 200
+
+        >>> MercurialRepository._check_url('file:///repo')
+
+        >>> MercurialRepository._check_url('http://example.com:65537/repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Error parsing URL: 'http://example.com:65537/repo'>
+        >>> MercurialRepository._check_url('foo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'foo'>
+        >>> MercurialRepository._check_url('git+ssh://example.com/my%20fine repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'git+ssh://example.com/my%20fine repo'>
+        >>> MercurialRepository._check_url('svn+http://example.com/repo')
+        Traceback (most recent call last):
+        ...
+        urllib.error.URLError: <urlopen error Unsupported protocol in URL 'svn+http://example.com/repo'>
         """
+        try:
+            parsed_url = urllib.parse.urlparse(url)
+            parsed_url.port  # trigger netloc parsing which might raise ValueError
+        except ValueError:
+            raise urllib.error.URLError("Error parsing URL: %r" % url)
+
         # check first if it's not an local url
+        if os.path.isabs(url) and os.path.isdir(url) or parsed_url.scheme == 'file':
+            # When creating repos, _get_url will use file protocol for local paths
+            return
+
+        if parsed_url.scheme not in ['http', 'https', 'ssh', 'git+http', 'git+https']:
+            raise urllib.error.URLError("Unsupported protocol in URL %r" % url)
+
         url = safe_bytes(url)
-        if os.path.isdir(url) or url.startswith(b'file:'):
-            return True
 
-        if url.startswith(b'ssh:'):
+        if parsed_url.scheme == 'ssh':
             # in case of invalid uri or authentication issues, sshpeer will
             # throw an exception.
             mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
-            return True
+            return
 
-        url_prefix = None
-        if b'+' in url[:url.find(b'://')]:
-            url_prefix, url = url.split(b'+', 1)
+        if '+' in parsed_url.scheme:  # strip 'git+' for hg-git URLs
+            url = url.split(b'+', 1)[1]
 
         url_obj = mercurial.util.url(url)
         test_uri, handlers = get_urllib_request_handlers(url_obj)
@@ -335,7 +363,7 @@
             # means it cannot be cloned
             raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 
-        if not url_prefix: # skip svn+http://... (and git+... too)
+        if parsed_url.scheme in ['http', 'https']:  # skip git+http://... etc
             # now check if it's a proper hg repo
             try:
                 mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
@@ -344,8 +372,6 @@
                     "url [%s] does not look like an hg repo org_exc: %s"
                     % (cleaned_uri, e))
 
-        return True
-
     def _get_repo(self, create, src_url=None, update_after_clone=False):
         """
         Function will check for mercurial repository in given path and return
@@ -358,12 +384,12 @@
         """
         try:
             if src_url:
-                url = safe_bytes(self._get_url(src_url))
+                url = self._get_url(src_url)
                 opts = {}
                 if not update_after_clone:
                     opts.update({'noupdate': True})
                 MercurialRepository._check_url(url, self.baseui)
-                mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
+                mercurial.commands.clone(self.baseui, safe_bytes(url), safe_bytes(self.path), **opts)
 
                 # Don't try to create if we've already cloned repo
                 create = False
@@ -379,7 +405,7 @@
 
     @LazyProperty
     def in_memory_changeset(self):
-        return MercurialInMemoryChangeset(self)
+        return inmemory.MercurialInMemoryChangeset(self)
 
     @LazyProperty
     def description(self):
@@ -387,11 +413,6 @@
         return safe_str(_desc or b'unknown')
 
     @LazyProperty
-    def contact(self):
-        return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
-                            or b'Unknown')
-
-    @LazyProperty
     def last_change(self):
         """
         Returns last change made on this repository as datetime object
@@ -489,7 +510,7 @@
         Returns ``MercurialChangeset`` object representing repository's
         changeset at the given ``revision``.
         """
-        return MercurialChangeset(repository=self, revision=self._get_revision(revision))
+        return changeset.MercurialChangeset(repository=self, revision=self._get_revision(revision))
 
     def get_changesets(self, start=None, end=None, start_date=None,
                        end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -545,6 +566,60 @@
 
         return CollectionGenerator(self, revs)
 
+    def get_diff_changesets(self, org_rev, other_repo, other_rev):
+        """
+        Returns lists of changesets that can be merged from this repo @org_rev
+        to other_repo @other_rev
+        ... and the other way
+        ... and the ancestors that would be used for merge
+
+        :param org_rev: the revision we want our compare to be made
+        :param other_repo: repo object, most likely the fork of org_repo. It has
+            all changesets that we need to obtain
+        :param other_rev: revision we want out compare to be made on other_repo
+        """
+        ancestors = None
+        if org_rev == other_rev:
+            org_changesets = []
+            other_changesets = []
+
+        else:
+            # case two independent repos
+            if self != other_repo:
+                hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui,
+                                                       safe_bytes(other_repo.path),
+                                                       safe_bytes(self.path))
+                # all ancestors of other_rev will be in other_repo and
+                # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
+
+            # not a remote compare; do it on the same repository
+            else:
+                hgrepo = other_repo._repo
+
+            ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+                         hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))]
+            if ancestors:
+                log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
+            else:
+                log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
+                ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+                             hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive!
+
+            other_changesets = [
+                other_repo.get_changeset(rev)
+                for rev in hgrepo.revs(
+                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+                    ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
+            ]
+            org_changesets = [
+                self.get_changeset(ascii_str(hgrepo[rev].hex()))
+                for rev in hgrepo.revs(
+                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+                    ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
+            ]
+
+        return other_changesets, org_changesets, ancestors
+
     def pull(self, url):
         """
         Tries to pull changes from external location.
@@ -561,7 +636,7 @@
         """
         Returns ``Workdir`` instance for this repository.
         """
-        return MercurialWorkdir(self)
+        return workdir.MercurialWorkdir(self)
 
     def get_config_value(self, section, name=None, config_file=None):
         """
--- a/kallithea/lib/vcs/backends/hg/ssh.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-import logging
-
-import mercurial.hg
-import mercurial.wireprotoserver
-
-from kallithea.lib.utils import make_ui
-from kallithea.lib.vcs.backends.ssh import BaseSshHandler
-from kallithea.lib.vcs.utils import safe_bytes
-
-
-log = logging.getLogger(__name__)
-
-
-class MercurialSshHandler(BaseSshHandler):
-    vcs_type = 'hg'
-
-    @classmethod
-    def make(cls, ssh_command_parts):
-        r"""
-        >>> import shlex
-
-        >>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name
-        'foo bar'
-        >>> MercurialSshHandler.make(shlex.split(' hg -R blåbærgrød serve --stdio ')).repo_name
-        'bl\xe5b\xe6rgr\xf8d'
-        >>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name
-        'foo"bar'
-
-        >>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio'))
-        >>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax
-        Traceback (most recent call last):
-        ValueError: No closing quotation
-        >>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here
-        """
-        if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']:
-            return cls(ssh_command_parts[2])
-
-        return None
-
-    def _serve(self):
-        # Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui
-        baseui = make_ui(repo_path=self.db_repo.repo_full_path)
-        if not self.allow_push:
-            baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
-            baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
-
-        repo = mercurial.hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path))
-        log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path)
-        mercurial.wireprotoserver.sshserver(baseui, repo).serve_forever()
--- a/kallithea/lib/vcs/backends/hg/workdir.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/backends/hg/workdir.py	Thu May 27 21:27:37 2021 +0200
@@ -21,4 +21,8 @@
             raise BranchDoesNotExistError
 
         raw_id = self.repository.branches[branch]
-        mercurial.merge.update(self.repository._repo, ascii_bytes(raw_id), False, False, None)
+        try:
+            mercurial.merge.update(self.repository._repo[ascii_bytes(raw_id)])
+        except TypeError:  # mergeupdate() missing 3 required positional arguments: 'node', 'branchmerge', and 'force'
+            # the single-argument update(ctx) form was introduced with Mercurial 5.6 (2c86b9587740/c1b603cdc95a)
+            mercurial.merge.update(self.repository._repo, ascii_bytes(raw_id), False, False, None)
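Note on the workdir.py hunk above: it keeps compatibility by probing the runtime API rather than comparing Mercurial version numbers. A minimal stand-alone sketch of the same pattern, assuming a Mercurial localrepo object and a changeset hash are at hand and that ascii_bytes comes from kallithea.lib.vcs.utils as used elsewhere in this backend; checkout_revision and hg_repo are illustrative names only:

    import mercurial.merge

    from kallithea.lib.vcs.utils import ascii_bytes

    def checkout_revision(hg_repo, raw_id):
        """Update the working directory to raw_id across Mercurial versions."""
        try:
            # Mercurial >= 5.6: update() takes a single changectx argument.
            mercurial.merge.update(hg_repo[ascii_bytes(raw_id)])
        except TypeError:
            # Older Mercurial: update(repo, node, branchmerge, force, ancestor).
            mercurial.merge.update(hg_repo, ascii_bytes(raw_id), False, False, None)
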
--- a/kallithea/lib/vcs/backends/ssh.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,109 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-"""
-vcs.backends.ssh
-~~~~~~~~~~~~~~~~~
-
-SSH backend for all available SCMs
-"""
-
-import datetime
-import logging
-import sys
-
-from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.utils2 import set_hook_environment
-from kallithea.model.db import Repository, User, UserSshKeys
-from kallithea.model.meta import Session
-
-
-log = logging.getLogger(__name__)
-
-
-class BaseSshHandler(object):
-    # Protocol for setting properties:
-    # Set by sub class:
-    #   vcs_type: 'hg' or 'git'
-    # Set by make() / __init__():
-    #   repo_name: requested repo name - only validated by serve()
-    # Set by serve() - must not be accessed before:
-    #   db_repo: repository db object
-    #   authuser: user that has been authenticated - like request.authuser ... which isn't used here
-    #   allow_push: false for read-only access to the repo
-
-    # Set defaults, in case .exit should be called early
-    vcs_type = None
-    repo_name = None
-
-    @staticmethod
-    def make(ssh_command):
-        """Factory function. Given a command as invoked over SSH (and preserved
-        in SSH_ORIGINAL_COMMAND when run as authorized_keys command), return a
-        handler if the command looks ok, else return None.
-        """
-        raise NotImplementedError
-
-    def __init__(self, repo_name):
-        self.repo_name = repo_name.rstrip('/')
-
-    def serve(self, user_id, key_id, client_ip):
-        """Verify basic sanity of the repository, and that the user is
-        valid and has access - then serve the native VCS protocol for
-        repository access."""
-        dbuser = User.get(user_id)
-        if dbuser is None:
-            self.exit('User %r not found' % user_id)
-        self.authuser = AuthUser.make(dbuser=dbuser, ip_addr=client_ip)
-        log.info('Authorized user %s from SSH %s trusting user id %s and key id %s for %r', dbuser, client_ip, user_id, key_id, self.repo_name)
-        if self.authuser is None: # not ok ... but already kind of authenticated by SSH ... but not really not authorized ...
-            self.exit('User %s from %s cannot be authorized' % (dbuser.username, client_ip))
-
-        ssh_key = UserSshKeys.get(key_id)
-        if ssh_key is None:
-            self.exit('SSH key %r not found' % key_id)
-        ssh_key.last_seen = datetime.datetime.now()
-        Session().commit()
-
-        if HasPermissionAnyMiddleware('repository.write',
-                                      'repository.admin')(self.authuser, self.repo_name):
-            self.allow_push = True
-        elif HasPermissionAnyMiddleware('repository.read')(self.authuser, self.repo_name):
-            self.allow_push = False
-        else:
-            self.exit('Access to %r denied' % self.repo_name)
-
-        self.db_repo = Repository.get_by_repo_name(self.repo_name)
-        if self.db_repo is None:
-            self.exit("Repository '%s' not found" % self.repo_name)
-        assert self.db_repo.repo_name == self.repo_name
-
-        # Set global hook environment up for 'push' actions.
-        # If pull actions should be served, the actual hook invocation will be
-        # hardcoded to 'pull' when log_pull_action is invoked (directly on Git,
-        # or through the Mercurial 'outgoing' hook).
-        # For push actions, the action in global hook environment is used (in
-        # handle_git_post_receive when it is called as Git post-receive hook,
-        # or in log_push_action through the Mercurial 'changegroup' hook).
-        set_hook_environment(self.authuser.username, client_ip, self.repo_name, self.vcs_type, 'push')
-        return self._serve()
-
-    def _serve(self):
-        """Serve the native protocol for repository access."""
-        raise NotImplementedError
-
-    def exit(self, error):
-        log.info('abort serving %s %s: %s', self.vcs_type, self.repo_name, error)
-        sys.stderr.write('abort: %s\n' % error)
-        sys.exit(1)
--- a/kallithea/lib/vcs/nodes.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/nodes.py	Thu May 27 21:27:37 2021 +0200
@@ -14,6 +14,8 @@
 import posixpath
 import stat
 
+from pygments import lexers
+
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError
 from kallithea.lib.vcs.utils import safe_bytes, safe_str
@@ -305,7 +307,6 @@
                 encoding = None
 
                 # try with pygments
-                from pygments import lexers
                 try:
                     mt = lexers.get_lexer_for_filename(self.name).mimetypes
                 except lexers.ClassNotFound:
@@ -335,7 +336,6 @@
         Returns pygment's lexer class. Would try to guess lexer taking file's
         content, name and mimetype.
         """
-        from pygments import lexers
         try:
             lexer = lexers.guess_lexer_for_filename(self.name, safe_str(self.content), stripnl=False)
         except lexers.ClassNotFound:
@@ -587,7 +587,7 @@
         self.path = name.rstrip('/')
         self.kind = NodeKind.SUBMODULE
         self.alias = alias
-        # we have to use emptyChangeset here since this can point to svn/git/hg
+        # we have to use emptyChangeset here since this can point to git/hg
         # submodules we cannot get from repository
         self.changeset = EmptyChangeset(changeset, alias=alias)
         self.url = url
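The nodes.py hunk only hoists the pygments import to module level; the two lookups it feeds stay the same. A small sketch of those lookups with an invented file name and content; the plain-text fallback lexer is an assumption for the example, not copied from nodes.py:

    from pygments import lexers

    name, content = 'example.py', 'print("hello")\n'

    # Mimetype lookup by file name, as used in the mimetype handling above.
    try:
        mimetypes = lexers.get_lexer_for_filename(name).mimetypes
    except lexers.ClassNotFound:
        mimetypes = []

    # Lexer guess from name plus content, as used in the lexer handling above.
    try:
        lexer = lexers.guess_lexer_for_filename(name, content, stripnl=False)
    except lexers.ClassNotFound:
        lexer = lexers.get_lexer_by_name('text')

    print(mimetypes, lexer.name)
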
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/vcs/ssh/base.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+vcs.backends.ssh
+~~~~~~~~~~~~~~~~~
+
+SSH backend for all available SCMs
+"""
+
+import datetime
+import logging
+import sys
+
+from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
+from kallithea.lib.utils2 import set_hook_environment
+from kallithea.model import db, meta
+
+
+log = logging.getLogger(__name__)
+
+
+class BaseSshHandler(object):
+    # Protocol for setting properties:
+    # Set by sub class:
+    #   vcs_type: 'hg' or 'git'
+    # Set by make() / __init__():
+    #   repo_name: requested repo name - only validated by serve()
+    # Set by serve() - must not be accessed before:
+    #   db_repo: repository db object
+    #   authuser: user that has been authenticated - like request.authuser ... which isn't used here
+    #   allow_push: false for read-only access to the repo
+
+    # Set defaults, in case .exit should be called early
+    vcs_type = None
+    repo_name = None
+
+    @staticmethod
+    def make(ssh_command):
+        """Factory function. Given a command as invoked over SSH (and preserved
+        in SSH_ORIGINAL_COMMAND when run as authorized_keys command), return a
+        handler if the command looks ok, else return None.
+        """
+        raise NotImplementedError
+
+    def __init__(self, repo_name):
+        self.repo_name = repo_name.rstrip('/')
+
+    def serve(self, user_id, key_id, client_ip):
+        """Verify basic sanity of the repository, and that the user is
+        valid and has access - then serve the native VCS protocol for
+        repository access."""
+        dbuser = db.User.get(user_id)
+        if dbuser is None:
+            self.exit('User %r not found' % user_id)
+        self.authuser = AuthUser.make(dbuser=dbuser, ip_addr=client_ip)
+        log.info('Authorized user %s from SSH %s trusting user id %s and key id %s for %r', dbuser, client_ip, user_id, key_id, self.repo_name)
+        if self.authuser is None: # not ok ... but already kind of authenticated by SSH ... but not really not authorized ...
+            self.exit('User %s from %s cannot be authorized' % (dbuser.username, client_ip))
+
+        ssh_key = db.UserSshKeys.get(key_id)
+        if ssh_key is None:
+            self.exit('SSH key %r not found' % key_id)
+        ssh_key.last_seen = datetime.datetime.now()
+        meta.Session().commit()
+
+        if HasPermissionAnyMiddleware('repository.write',
+                                      'repository.admin')(self.authuser, self.repo_name):
+            self.allow_push = True
+        elif HasPermissionAnyMiddleware('repository.read')(self.authuser, self.repo_name):
+            self.allow_push = False
+        else:
+            self.exit('Access to %r denied' % self.repo_name)
+
+        self.db_repo = db.Repository.get_by_repo_name(self.repo_name)
+        if self.db_repo is None:
+            self.exit("Repository '%s' not found" % self.repo_name)
+        assert self.db_repo.repo_name == self.repo_name
+
+        # Set global hook environment up for 'push' actions.
+        # For push actions, the action in global hook environment is used in
+        # process_pushed_raw_ids (which is called as Git post-receive hook,
+        # or Mercurial 'changegroup' hook).
+        # For pull actions, the actual hook in log_pull_action (called directly
+        # on Git, or through the 'outgoing' Mercurial hook) is hardcoded to
+        # ignore the environment action and always use 'pull'.
+        set_hook_environment(self.authuser.username, client_ip, self.repo_name, self.vcs_type, 'push')
+        self._serve()
+
+    def _serve(self):
+        """Serve the native protocol for repository access."""
+        raise NotImplementedError
+
+    def exit(self, error):
+        log.info('abort serving %s %s: %s', self.vcs_type, self.repo_name, error)
+        sys.stderr.write('abort: %s\n' % error)
+        sys.exit(1)
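The new base module defines a small protocol: a subclass sets vcs_type, implements make() to recognize its SSH command, and implements _serve() to speak the native protocol once serve() has validated the user, key and permissions. A hypothetical minimal subclass, purely to illustrate that contract under the module layout added above (EchoSshHandler is not part of Kallithea):

    from kallithea.lib.vcs.ssh import base

    class EchoSshHandler(base.BaseSshHandler):
        vcs_type = 'echo'  # real subclasses use 'hg' or 'git'

        @classmethod
        def make(cls, ssh_command_parts):
            # Claim the command only if it looks like "echo <repo>"; otherwise
            # return None so other handlers can have a go.
            if len(ssh_command_parts) == 2 and ssh_command_parts[0] == 'echo':
                return cls(ssh_command_parts[1])
            return None

        def _serve(self):
            # serve() has already set self.db_repo and self.allow_push here.
            print('would serve %s (push allowed: %s)' % (self.repo_name, self.allow_push))
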
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/vcs/ssh/git.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+import os
+
+from kallithea.lib import hooks
+from kallithea.lib.vcs.ssh import base
+
+
+log = logging.getLogger(__name__)
+
+
+class GitSshHandler(base.BaseSshHandler):
+    vcs_type = 'git'
+
+    @classmethod
+    def make(cls, ssh_command_parts):
+        r"""
+        >>> import shlex
+
+        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).repo_name
+        'foo bar'
+        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).verb
+        'git-upload-pack'
+        >>> GitSshHandler.make(shlex.split(" git-upload-pack /blåbærgrød ")).repo_name # might not be necessary to support no quoting ... but we can
+        'bl\xe5b\xe6rgr\xf8d'
+        >>> GitSshHandler.make(shlex.split('''git-upload-pack "/foo'bar"''')).repo_name
+        "foo'bar"
+        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).repo_name
+        'foo'
+        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).verb
+        'git-receive-pack'
+
+        >>> GitSshHandler.make(shlex.split("/bin/git-upload-pack '/foo'")) # ssh-serve will report 'SSH command %r is not supported'
+        >>> GitSshHandler.make(shlex.split('''git-upload-pack /foo bar''')) # ssh-serve will report 'SSH command %r is not supported'
+        >>> shlex.split("git-upload-pack '/foo'bar' x") # ssh-serve will report: Error parsing SSH command "...": No closing quotation
+        Traceback (most recent call last):
+        ValueError: No closing quotation
+        >>> GitSshHandler.make(shlex.split('hg -R foo serve --stdio')) # not handled here
+        """
+        if (len(ssh_command_parts) == 2 and
+            ssh_command_parts[0] in ['git-upload-pack', 'git-receive-pack'] and
+            ssh_command_parts[1].startswith('/')
+        ):
+            return cls(ssh_command_parts[1][1:], ssh_command_parts[0])
+
+        return None
+
+    def __init__(self, repo_name, verb):
+        base.BaseSshHandler.__init__(self, repo_name)
+        self.verb = verb
+
+    def _serve(self):
+        if self.verb == 'git-upload-pack': # action 'pull'
+            # base class called set_hook_environment with 'push' action ... but log_pull_action ignores that and will 'pull'
+            hooks.log_pull_action()
+        else: # probably verb 'git-receive-pack', action 'push'
+            if not self.allow_push:
+                self.exit('Push access to %r denied' % self.repo_name)
+            # Note: push logging is handled by Git post-receive hook
+
+        # git shell is not a real shell but uses shell-inspired quoting *inside* the argument.
+        # Per https://github.com/git/git/blob/v2.22.0/quote.c#L12 :
+        # The path must be "'" quoted, but "'" and "!" must exit the quoting and be "\" escaped
+        quoted_abspath = "'%s'" % self.db_repo.repo_full_path.replace("'", r"'\''").replace("!", r"'\!'")
+        newcmd = ['git', 'shell', '-c', "%s %s" % (self.verb, quoted_abspath)]
+        log.debug('Serving: %s', newcmd)
+        os.execvp(newcmd[0], newcmd)
+        self.exit("Failed to exec 'git' as %s" % newcmd)
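The quoting rule in the comment above is easy to get wrong in isolation. A stand-alone sketch of just that rule, with quote_for_git_shell as an illustrative name and made-up paths:

    def quote_for_git_shell(path):
        # Wrap in single quotes; "'" and "!" must leave the quoting and be
        # backslash-escaped, per git's quote.c.
        return "'%s'" % path.replace("'", r"'\''").replace("!", r"'\!'")

    if __name__ == '__main__':
        for path in ['/srv/repos/foo bar', "/srv/repos/it's!tricky"]:
            print('git-upload-pack %s' % quote_for_git_shell(path))
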
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/vcs/ssh/hg.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+
+import mercurial.hg
+import mercurial.wireprotoserver
+
+from kallithea.lib.utils import make_ui
+from kallithea.lib.vcs.ssh import base
+from kallithea.lib.vcs.utils import safe_bytes
+
+
+log = logging.getLogger(__name__)
+
+
+class MercurialSshHandler(base.BaseSshHandler):
+    vcs_type = 'hg'
+
+    @classmethod
+    def make(cls, ssh_command_parts):
+        r"""
+        >>> import shlex
+
+        >>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name
+        'foo bar'
+        >>> MercurialSshHandler.make(shlex.split(' hg -R blåbærgrød serve --stdio ')).repo_name
+        'bl\xe5b\xe6rgr\xf8d'
+        >>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name
+        'foo"bar'
+
+        >>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio'))
+        >>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax
+        Traceback (most recent call last):
+        ValueError: No closing quotation
+        >>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here
+        """
+        if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']:
+            return cls(ssh_command_parts[2])
+
+        return None
+
+    def _serve(self):
+        # Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui
+        baseui = make_ui(repo_path=self.db_repo.repo_full_path)
+        if not self.allow_push:
+            baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.bin.vcs_hooks.rejectpush')
+            baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.bin.vcs_hooks.rejectpush')
+
+        repo = mercurial.hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path))
+        log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path)
+        mercurial.wireprotoserver.sshserver(baseui, repo).serve_forever()
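Both handlers expose the same make() classmethod, so a caller holding SSH_ORIGINAL_COMMAND can simply try each class in turn. A sketch under that assumption; the dispatch loop below is illustrative and not a literal copy of the kallithea-cli ssh-serve code:

    import shlex

    from kallithea.lib.vcs.ssh import git, hg

    def find_handler(ssh_original_command):
        parts = shlex.split(ssh_original_command)
        for handler_class in (hg.MercurialSshHandler, git.GitSshHandler):
            handler = handler_class.make(parts)
            if handler is not None:
                return handler
        return None  # unknown command; the caller should report an error
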
--- a/kallithea/lib/vcs/subprocessio.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/subprocessio.py	Thu May 27 21:27:37 2021 +0200
@@ -380,23 +380,23 @@
             if (returncode is not None # process has terminated
                 and returncode != 0
             ): # and it failed
-                self.output.stop()
+                getattr(self.output, 'stop', lambda: None)()
                 self.error.stop()
                 err = ''.join(self.error)
                 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
         return next(self.output)
 
     def throw(self, type, value=None, traceback=None):
-        if self.output.length or not self.output.done_reading:
+        if getattr(self.output, 'length') or not getattr(self.output, 'done_reading'):
             raise type(value)
 
     def close(self):
         try:
-            self.process.terminate()
+            getattr(self.process, 'terminate', lambda: None)()
         except:
             pass
         try:
-            self.output.close()
+            getattr(self.output, 'close', lambda: None)()
         except:
             pass
         try:
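The subprocessio.py hunk swaps direct attribute calls for getattr() with a no-op default, so a missing stop/terminate/close attribute no longer raises during cleanup. The pattern in isolation, with illustrative names:

    def call_if_present(obj, method_name):
        # Call obj.method_name() if it exists, otherwise silently do nothing.
        getattr(obj, method_name, lambda: None)()

    class PartialStream(object):
        def stop(self):
            print('stopped')

    call_if_present(PartialStream(), 'stop')   # prints 'stopped'
    call_if_present(object(), 'terminate')     # no such method - no-op
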
--- a/kallithea/lib/vcs/utils/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/utils/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -9,6 +9,10 @@
 import re
 import time
 
+import chardet
+
+from kallithea.lib.vcs.conf import settings
+
 
 def makedate():
     lt = time.localtime()
@@ -81,7 +85,6 @@
     if not isinstance(s, bytes):  # use __str__ and don't expect UnicodeDecodeError
         return str(s)
 
-    from kallithea.lib.vcs.conf import settings
     for enc in settings.DEFAULT_ENCODINGS:
         try:
             return str(s, enc)
@@ -89,11 +92,10 @@
             pass
 
     try:
-        import chardet
         encoding = chardet.detect(s)['encoding']
         if encoding is not None:
             return s.decode(encoding)
-    except (ImportError, UnicodeDecodeError):
+    except UnicodeDecodeError:
         pass
 
     return str(s, settings.DEFAULT_ENCODINGS[0], 'replace')
@@ -110,7 +112,6 @@
 
     assert isinstance(s, str), repr(s)  # bytes cannot coerce with __str__ or handle None or int
 
-    from kallithea.lib.vcs.conf import settings
     for enc in settings.DEFAULT_ENCODINGS:
         try:
             return s.encode(enc)
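With chardet now imported at module level, safe_str keeps its three-step strategy: try the configured encodings, then a chardet guess, then a lossy decode. A stand-alone sketch of that strategy; the function name and the default encoding tuple are assumptions for the example:

    import chardet

    def decode_best_effort(raw, encodings=('utf-8',)):
        # 1. Try the configured encodings in order.
        for enc in encodings:
            try:
                return str(raw, enc)
            except UnicodeDecodeError:
                pass
        # 2. Let chardet guess the encoding.
        guessed = chardet.detect(raw)['encoding']
        if guessed is not None:
            try:
                return raw.decode(guessed)
            except UnicodeDecodeError:
                pass
        # 3. Fall back to a lossy decode with replacement characters.
        return str(raw, encodings[0], 'replace')

    print(decode_best_effort(b'bl\xe5b\xe6rgr\xf8d'))  # latin-1 bytes; utf-8 fails
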
--- a/kallithea/lib/vcs/utils/helpers.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/lib/vcs/utils/helpers.py	Thu May 27 21:27:37 2021 +0200
@@ -3,13 +3,18 @@
 """
 
 import datetime
+import logging
 import os
 import re
 import time
 import urllib.request
 
 import mercurial.url
+from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import ClassNotFound, guess_lexer_for_filename
 
+from kallithea.lib.vcs import backends
 from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
 from kallithea.lib.vcs.utils import safe_str
 from kallithea.lib.vcs.utils.paths import abspath
@@ -67,7 +72,6 @@
 
     :raises VCSError: if given ``path`` is not a directory
     """
-    from kallithea.lib.vcs.backends import get_backend
     if hasattr(path, '__call__'):
         path = path()
     if not os.path.isdir(path):
@@ -87,7 +91,7 @@
         # We still need to check if it's not bare repository as
         # bare repos don't have working directories
         try:
-            get_backend(key)(path)
+            backends.get_backend(key)(path)
             result.append(key)
             continue
         except RepositoryError:
@@ -99,22 +103,34 @@
     return result
 
 
+def get_scm_size(alias, root_path):
+    if not alias.startswith('.'):
+        alias += '.'
+
+    size_scm, size_root = 0, 0
+    for path, dirs, files in os.walk(root_path):
+        if path.find(alias) != -1:
+            for f in files:
+                try:
+                    size_scm += os.path.getsize(os.path.join(path, f))
+                except OSError:
+                    pass
+        else:
+            for f in files:
+                try:
+                    size_root += os.path.getsize(os.path.join(path, f))
+                except OSError:
+                    pass
+
+    return size_scm, size_root
+
+
 def get_highlighted_code(name, code, type='terminal'):
     """
     If pygments are available on the system
     then returned output is colored. Otherwise
     unchanged content is returned.
     """
-    import logging
-    try:
-        import pygments
-        pygments
-    except ImportError:
-        return code
-    from pygments import highlight
-    from pygments.lexers import guess_lexer_for_filename, ClassNotFound
-    from pygments.formatters import TerminalFormatter
-
     try:
         lexer = guess_lexer_for_filename(name, code)
         formatter = TerminalFormatter()
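get_scm_size(), added to this helpers module above, walks the repository directory once and splits the byte count between SCM metadata (paths containing the alias, e.g. '.hg') and the remaining working files. A possible call, with a made-up path:

    from kallithea.lib.vcs.utils.helpers import get_scm_size

    size_scm, size_root = get_scm_size('.hg', '/srv/repos/example')
    print('metadata: %d bytes, working files: %d bytes' % (size_scm, size_root))
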
--- a/kallithea/lib/vcs/utils/progressbar.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,422 +0,0 @@
-# encoding: UTF-8
-
-import datetime
-import string
-import sys
-
-from kallithea.lib.vcs.utils.filesize import filesizeformat
-
-
-class ProgressBarError(Exception):
-    pass
-
-
-class AlreadyFinishedError(ProgressBarError):
-    pass
-
-
-class ProgressBar(object):
-
-    default_elements = ['percentage', 'bar', 'steps']
-
-    def __init__(self, steps=100, stream=None, elements=None):
-        self.step = 0
-        self.steps = steps
-        self.stream = stream or sys.stderr
-        self.bar_char = '='
-        self.width = 50
-        self.separator = ' | '
-        self.elements = elements or self.default_elements
-        self.started = None
-        self.finished = False
-        self.steps_label = 'Step'
-        self.time_label = 'Time'
-        self.eta_label = 'ETA'
-        self.speed_label = 'Speed'
-        self.transfer_label = 'Transfer'
-
-    def __str__(self):
-        return self.get_line()
-
-    def __iter__(self):
-        start = self.step
-        end = self.steps + 1
-        for x in range(start, end):
-            self.render(x)
-            yield x
-
-    def get_separator(self):
-        return self.separator
-
-    def get_bar_char(self):
-        return self.bar_char
-
-    def get_bar(self):
-        char = self.get_bar_char()
-        perc = self.get_percentage()
-        length = int(self.width * perc / 100)
-        bar = char * length
-        bar = bar.ljust(self.width)
-        return bar
-
-    def get_elements(self):
-        return self.elements
-
-    def get_template(self):
-        separator = self.get_separator()
-        elements = self.get_elements()
-        return string.Template(separator.join((('$%s' % e) for e in elements)))
-
-    def get_total_time(self, current_time=None):
-        if current_time is None:
-            current_time = datetime.datetime.now()
-        if not self.started:
-            return datetime.timedelta()
-        return current_time - self.started
-
-    def get_rendered_total_time(self):
-        delta = self.get_total_time()
-        if not delta:
-            ttime = '-'
-        else:
-            ttime = str(delta)
-        return '%s %s' % (self.time_label, ttime)
-
-    def get_eta(self, current_time=None):
-        if current_time is None:
-            current_time = datetime.datetime.now()
-        if self.step == 0:
-            return datetime.timedelta()
-        total_seconds = self.get_total_time().total_seconds()
-        eta_seconds = total_seconds * self.steps / self.step - total_seconds
-        return datetime.timedelta(seconds=int(eta_seconds))
-
-    def get_rendered_eta(self):
-        eta = self.get_eta()
-        if not eta:
-            eta = '--:--:--'
-        else:
-            eta = str(eta).rjust(8)
-        return '%s: %s' % (self.eta_label, eta)
-
-    def get_percentage(self):
-        return float(self.step) / self.steps * 100
-
-    def get_rendered_percentage(self):
-        perc = self.get_percentage()
-        return ('%s%%' % (int(perc))).rjust(5)
-
-    def get_rendered_steps(self):
-        return '%s: %s/%s' % (self.steps_label, self.step, self.steps)
-
-    def get_rendered_speed(self, step=None, total_seconds=None):
-        if step is None:
-            step = self.step
-        if total_seconds is None:
-            total_seconds = self.get_total_time().total_seconds()
-        if step <= 0 or total_seconds <= 0:
-            speed = '-'
-        else:
-            speed = filesizeformat(float(step) / total_seconds)
-        return '%s: %s/s' % (self.speed_label, speed)
-
-    def get_rendered_transfer(self, step=None, steps=None):
-        if step is None:
-            step = self.step
-        if steps is None:
-            steps = self.steps
-
-        if steps <= 0:
-            return '%s: -' % self.transfer_label
-        total = filesizeformat(float(steps))
-        if step <= 0:
-            transferred = '-'
-        else:
-            transferred = filesizeformat(float(step))
-        return '%s: %s / %s' % (self.transfer_label, transferred, total)
-
-    def get_context(self):
-        return {
-            'percentage': self.get_rendered_percentage(),
-            'bar': self.get_bar(),
-            'steps': self.get_rendered_steps(),
-            'time': self.get_rendered_total_time(),
-            'eta': self.get_rendered_eta(),
-            'speed': self.get_rendered_speed(),
-            'transfer': self.get_rendered_transfer(),
-        }
-
-    def get_line(self):
-        template = self.get_template()
-        context = self.get_context()
-        return template.safe_substitute(**context)
-
-    def write(self, data):
-        self.stream.write(data)
-
-    def render(self, step):
-        if not self.started:
-            self.started = datetime.datetime.now()
-        if self.finished:
-            raise AlreadyFinishedError
-        self.step = step
-        self.write('\r%s' % self)
-        if step == self.steps:
-            self.finished = True
-        if step == self.steps:
-            self.write('\n')
-
-
-"""
-termcolors.py
-
-Grabbed from Django (http://www.djangoproject.com)
-"""
-
-color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
-foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
-background = dict([(color_names[x], '4%s' % x) for x in range(8)])
-
-RESET = '0'
-opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
-
-
-def colorize(text='', opts=(), **kwargs):
-    """
-    Returns your text, enclosed in ANSI graphics codes.
-
-    Depends on the keyword arguments 'fg' and 'bg', and the contents of
-    the opts tuple/list.
-
-    Returns the RESET code if no parameters are given.
-
-    Valid colors:
-        'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
-
-    Valid options:
-        'bold'
-        'underscore'
-        'blink'
-        'reverse'
-        'conceal'
-        'noreset' - string will not be auto-terminated with the RESET code
-
-    Examples:
-        colorize('hello', fg='red', bg='blue', opts=('blink',))
-        colorize()
-        colorize('goodbye', opts=('underscore',))
-        print colorize('first line', fg='red', opts=('noreset',))
-        print 'this should be red too'
-        print colorize('and so should this')
-        print 'this should not be red'
-    """
-    code_list = []
-    if text == '' and len(opts) == 1 and opts[0] == 'reset':
-        return '\x1b[%sm' % RESET
-    for k, v in kwargs.items():
-        if k == 'fg':
-            code_list.append(foreground[v])
-        elif k == 'bg':
-            code_list.append(background[v])
-    for o in opts:
-        if o in opt_dict:
-            code_list.append(opt_dict[o])
-    if 'noreset' not in opts:
-        text = text + '\x1b[%sm' % RESET
-    return ('\x1b[%sm' % ';'.join(code_list)) + text
-
-
-def make_style(opts=(), **kwargs):
-    """
-    Returns a function with default parameters for colorize()
-
-    Example:
-        bold_red = make_style(opts=('bold',), fg='red')
-        print bold_red('hello')
-        KEYWORD = make_style(fg='yellow')
-        COMMENT = make_style(fg='blue', opts=('bold',))
-    """
-    return lambda text: colorize(text, opts, **kwargs)
-
-
-NOCOLOR_PALETTE = 'nocolor'
-DARK_PALETTE = 'dark'
-LIGHT_PALETTE = 'light'
-
-PALETTES = {
-    NOCOLOR_PALETTE: {
-        'ERROR':        {},
-        'NOTICE':       {},
-        'SQL_FIELD':    {},
-        'SQL_COLTYPE':  {},
-        'SQL_KEYWORD':  {},
-        'SQL_TABLE':    {},
-        'HTTP_INFO':         {},
-        'HTTP_SUCCESS':      {},
-        'HTTP_REDIRECT':     {},
-        'HTTP_NOT_MODIFIED': {},
-        'HTTP_BAD_REQUEST':  {},
-        'HTTP_NOT_FOUND':    {},
-        'HTTP_SERVER_ERROR': {},
-    },
-    DARK_PALETTE: {
-        'ERROR':        { 'fg': 'red', 'opts': ('bold',) },
-        'NOTICE':       { 'fg': 'red' },
-        'SQL_FIELD':    { 'fg': 'green', 'opts': ('bold',) },
-        'SQL_COLTYPE':  { 'fg': 'green' },
-        'SQL_KEYWORD':  { 'fg': 'yellow' },
-        'SQL_TABLE':    { 'opts': ('bold',) },
-        'HTTP_INFO':         { 'opts': ('bold',) },
-        'HTTP_SUCCESS':      { },
-        'HTTP_REDIRECT':     { 'fg': 'green' },
-        'HTTP_NOT_MODIFIED': { 'fg': 'cyan' },
-        'HTTP_BAD_REQUEST':  { 'fg': 'red', 'opts': ('bold',) },
-        'HTTP_NOT_FOUND':    { 'fg': 'yellow' },
-        'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
-    },
-    LIGHT_PALETTE: {
-        'ERROR':        { 'fg': 'red', 'opts': ('bold',) },
-        'NOTICE':       { 'fg': 'red' },
-        'SQL_FIELD':    { 'fg': 'green', 'opts': ('bold',) },
-        'SQL_COLTYPE':  { 'fg': 'green' },
-        'SQL_KEYWORD':  { 'fg': 'blue' },
-        'SQL_TABLE':    { 'opts': ('bold',) },
-        'HTTP_INFO':         { 'opts': ('bold',) },
-        'HTTP_SUCCESS':      { },
-        'HTTP_REDIRECT':     { 'fg': 'green', 'opts': ('bold',) },
-        'HTTP_NOT_MODIFIED': { 'fg': 'green' },
-        'HTTP_BAD_REQUEST':  { 'fg': 'red', 'opts': ('bold',) },
-        'HTTP_NOT_FOUND':    { 'fg': 'red' },
-        'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
-    }
-}
-DEFAULT_PALETTE = DARK_PALETTE
-
-# ---------------------------- #
-# --- End of termcolors.py --- #
-# ---------------------------- #
-
-
-class ColoredProgressBar(ProgressBar):
-
-    BAR_COLORS = (
-        (10, 'red'),
-        (30, 'magenta'),
-        (50, 'yellow'),
-        (99, 'green'),
-        (100, 'blue'),
-    )
-
-    def get_line(self):
-        line = super(ColoredProgressBar, self).get_line()
-        perc = self.get_percentage()
-        if perc > 100:
-            color = 'blue'
-        for max_perc, color in self.BAR_COLORS:
-            if perc <= max_perc:
-                break
-        return colorize(line, fg=color)
-
-
-class AnimatedProgressBar(ProgressBar):
-
-    def get_bar_char(self):
-        chars = '-/|\\'
-        if self.step >= self.steps:
-            return '='
-        return chars[self.step % len(chars)]
-
-
-class BarOnlyProgressBar(ProgressBar):
-
-    default_elements = ['bar', 'steps']
-
-    def get_bar(self):
-        bar = super(BarOnlyProgressBar, self).get_bar()
-        perc = self.get_percentage()
-        perc_text = '%s%%' % int(perc)
-        text = (' %s%% ' % (perc_text)).center(self.width, '=')
-        L = text.find(' ')
-        R = text.rfind(' ')
-        bar = ' '.join((bar[:L], perc_text, bar[R:]))
-        return bar
-
-
-class AnimatedColoredProgressBar(AnimatedProgressBar,
-                                 ColoredProgressBar):
-    pass
-
-
-class BarOnlyColoredProgressBar(ColoredProgressBar,
-                                BarOnlyProgressBar):
-    pass
-
-
-def main():
-    import time
-
-    print("Standard progress bar...")
-    bar = ProgressBar(30)
-    for x in range(1, 31):
-        bar.render(x)
-        time.sleep(0.02)
-    bar.stream.write('\n')
-    print()
-
-    print("Empty bar...")
-    bar = ProgressBar(50)
-    bar.render(0)
-    print()
-    print()
-
-    print("Colored bar...")
-    bar = ColoredProgressBar(20)
-    for x in bar:
-        time.sleep(0.01)
-    print()
-
-    print("Animated char bar...")
-    bar = AnimatedProgressBar(20)
-    for x in bar:
-        time.sleep(0.01)
-    print()
-
-    print("Animated + colored char bar...")
-    bar = AnimatedColoredProgressBar(20)
-    for x in bar:
-        time.sleep(0.01)
-    print()
-
-    print("Bar only ...")
-    bar = BarOnlyProgressBar(20)
-    for x in bar:
-        time.sleep(0.01)
-    print()
-
-    print("Colored, longer bar-only, eta, total time ...")
-    bar = BarOnlyColoredProgressBar(40)
-    bar.width = 60
-    bar.elements += ['time', 'eta']
-    for x in bar:
-        time.sleep(0.01)
-    print()
-    print()
-
-    print("File transfer bar, breaks after 2 seconds ...")
-    total_bytes = 1024 * 1024 * 2
-    bar = ProgressBar(total_bytes)
-    bar.width = 50
-    bar.elements.remove('steps')
-    bar.elements += ['transfer', 'time', 'eta', 'speed']
-    for x in range(0, bar.steps, 1024):
-        bar.render(x)
-        time.sleep(0.01)
-        now = datetime.datetime.now()
-        if now - bar.started >= datetime.timedelta(seconds=2):
-            break
-    print()
-    print()
-
-
-if __name__ == '__main__':
-    main()
--- a/kallithea/lib/verlib.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,329 +0,0 @@
-"""
-"Rational" version definition and parsing for DistutilsVersionFight
-discussion at PyCon 2009.
-"""
-
-import re
-
-
-class IrrationalVersionError(Exception):
-    """This is an irrational version."""
-    pass
-
-
-class HugeMajorVersionNumError(IrrationalVersionError):
-    """An irrational version because the major version number is huge
-    (often because a year or date was used).
-
-    See `error_on_huge_major_num` option in `NormalizedVersion` for details.
-    This guard can be disabled by setting that option False.
-    """
-    pass
-
-
-# A marker used in the second and third parts of the `parts` tuple, for
-# versions that don't have those segments, to sort properly. An example
-# of versions in sort order ('highest' last):
-#   1.0b1                 ((1,0), ('b',1), ('f',))
-#   1.0.dev345            ((1,0), ('f',),  ('dev', 345))
-#   1.0                   ((1,0), ('f',),  ('f',))
-#   1.0.post256.dev345    ((1,0), ('f',),  ('f', 'post', 256, 'dev', 345))
-#   1.0.post345           ((1,0), ('f',),  ('f', 'post', 345, 'f'))
-#                                   ^        ^                 ^
-#   'b' < 'f' ---------------------/         |                 |
-#                                            |                 |
-#   'dev' < 'f' < 'post' -------------------/                  |
-#                                                              |
-#   'dev' < 'f' ----------------------------------------------/
-# Other letters would do, but 'f' for 'final' is kind of nice.
-FINAL_MARKER = ('f',)
-
-VERSION_RE = re.compile(r'''
-    ^
-    (?P<version>\d+\.\d+)          # minimum 'N.N'
-    (?P<extraversion>(?:\.\d+)*)   # any number of extra '.N' segments
-    (?:
-        (?P<prerel>[abc]|rc)       # 'a'=alpha, 'b'=beta, 'c'=release candidate
-                                   # 'rc'= alias for release candidate
-        (?P<prerelversion>\d+(?:\.\d+)*)
-    )?
-    (?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
-    $''', re.VERBOSE)
-
-
-class NormalizedVersion(object):
-    """A rational version.
-
-    Good:
-        1.2         # equivalent to "1.2.0"
-        1.2.0
-        1.2a1
-        1.2.3a2
-        1.2.3b1
-        1.2.3c1
-        1.2.3.4
-        TODO: fill this out
-
-    Bad:
-        1           # minimum two numbers
-        1.2a        # release level must have a release serial
-        1.2.3b
-    """
-    def __init__(self, s, error_on_huge_major_num=True):
-        """Create a NormalizedVersion instance from a version string.
-
-        :param s {str} The version string.
-        :param error_on_huge_major_num {bool} Whether to consider an
-            apparent use of a year or full date as the major version number
-            an error. Default True. One of the observed patterns on PyPI before
-            the introduction of `NormalizedVersion` was version numbers like this:
-                2009.01.03
-                20040603
-                2005.01
-            This guard is here to strongly encourage the package author to
-            use an alternate version, because a release deployed into PyPI
-            and, e.g. downstream Linux package managers, will forever remove
-            the possibility of using a version number like "1.0" (i.e.
-            where the major number is less than that huge major number).
-        """
-        self._parse(s, error_on_huge_major_num)
-
-    @classmethod
-    def from_parts(cls, version, prerelease=FINAL_MARKER,
-                   devpost=FINAL_MARKER):
-        return cls(cls.parts_to_str((version, prerelease, devpost)))
-
-    def _parse(self, s, error_on_huge_major_num=True):
-        """Parses a string version into parts."""
-        match = VERSION_RE.search(s)
-        if not match:
-            raise IrrationalVersionError(s)
-
-        groups = match.groupdict()
-        parts = []
-
-        # main version
-        block = self._parse_numdots(groups['version'], s, False, 2)
-        extraversion = groups.get('extraversion')
-        if extraversion not in ('', None):
-            block += self._parse_numdots(extraversion[1:], s)
-        parts.append(tuple(block))
-
-        # prerelease
-        prerel = groups.get('prerel')
-        if prerel is not None:
-            block = [prerel]
-            block += self._parse_numdots(groups.get('prerelversion'), s,
-                                         pad_zeros_length=1)
-            parts.append(tuple(block))
-        else:
-            parts.append(FINAL_MARKER)
-
-        # postdev
-        if groups.get('postdev'):
-            post = groups.get('post')
-            dev = groups.get('dev')
-            postdev = []
-            if post is not None:
-                postdev.extend([FINAL_MARKER[0], 'post', int(post)])
-                if dev is None:
-                    postdev.append(FINAL_MARKER[0])
-            if dev is not None:
-                postdev.extend(['dev', int(dev)])
-            parts.append(tuple(postdev))
-        else:
-            parts.append(FINAL_MARKER)
-        self.parts = tuple(parts)
-        if error_on_huge_major_num and self.parts[0][0] > 1980:
-            raise HugeMajorVersionNumError("huge major version number, %r, "
-                "which might cause future problems: %r" % (self.parts[0][0], s))
-
-    def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True,
-                       pad_zeros_length=0):
-        """Parse 'N.N.N' sequences, return a list of ints.
-
-        :param s {str} 'N.N.N..." sequence to be parsed
-        :param full_ver_str {str} The full version string from which this
-            comes. Used for error strings.
-        :param drop_trailing_zeros {bool} Whether to drop trailing zeros
-            from the returned list. Default True.
-        :param pad_zeros_length {int} The length to which to pad the
-            returned list with zeros, if necessary. Default 0.
-        """
-        nums = []
-        for n in s.split("."):
-            if len(n) > 1 and n[0] == '0':
-                raise IrrationalVersionError("cannot have leading zero in "
-                    "version number segment: '%s' in %r" % (n, full_ver_str))
-            nums.append(int(n))
-        if drop_trailing_zeros:
-            while nums and nums[-1] == 0:
-                nums.pop()
-        while len(nums) < pad_zeros_length:
-            nums.append(0)
-        return nums
-
-    def __str__(self):
-        return self.parts_to_str(self.parts)
-
-    @classmethod
-    def parts_to_str(cls, parts):
-        """Transforms a version expressed in tuple into its string
-        representation."""
-        # XXX This doesn't check for invalid tuples
-        main, prerel, postdev = parts
-        s = '.'.join(str(v) for v in main)
-        if prerel is not FINAL_MARKER:
-            s += prerel[0]
-            s += '.'.join(str(v) for v in prerel[1:])
-        if postdev and postdev is not FINAL_MARKER:
-            if postdev[0] == 'f':
-                postdev = postdev[1:]
-            i = 0
-            while i < len(postdev):
-                if i % 2 == 0:
-                    s += '.'
-                s += str(postdev[i])
-                i += 1
-        return s
-
-    def __repr__(self):
-        return "%s('%s')" % (self.__class__.__name__, self)
-
-    def _cannot_compare(self, other):
-        raise TypeError("cannot compare %s and %s"
-                % (type(self).__name__, type(other).__name__))
-
-    def __eq__(self, other):
-        if not isinstance(other, NormalizedVersion):
-            self._cannot_compare(other)
-        return self.parts == other.parts
-
-    def __lt__(self, other):
-        if not isinstance(other, NormalizedVersion):
-            self._cannot_compare(other)
-        return self.parts < other.parts
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __gt__(self, other):
-        return not (self.__lt__(other) or self.__eq__(other))
-
-    def __le__(self, other):
-        return self.__eq__(other) or self.__lt__(other)
-
-    def __ge__(self, other):
-        return self.__eq__(other) or self.__gt__(other)
-
-
-def suggest_normalized_version(s):
-    """Suggest a normalized version close to the given version string.
-
-    If you have a version string that isn't rational (i.e. NormalizedVersion
-    doesn't like it) then you might be able to get an equivalent (or close)
-    rational version from this function.
-
-    This does a number of simple normalizations to the given string, based
-    on observation of versions currently in use on PyPI. Given a dump of
-    those version during PyCon 2009, 4287 of them:
-    - 2312 (53.93%) match NormalizedVersion without change
-    - with the automatic suggestion
-    - 3474 (81.04%) match when using this suggestion method
-
-    :param s {str} An irrational version string.
-    :returns: A rational version string, or None, if couldn't determine one.
-    """
-    try:
-        NormalizedVersion(s)
-        return s   # already rational
-    except IrrationalVersionError:
-        pass
-
-    rs = s.lower()
-
-    # part of this could use maketrans
-    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
-                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
-                       ('-pre', 'c'),
-                       ('-release', ''), ('.release', ''), ('-stable', ''),
-                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
-                       ('final', '')):
-        rs = rs.replace(orig, repl)
-
-    # if something ends with dev or pre, we add a 0
-    rs = re.sub(r"pre$", r"pre0", rs)
-    rs = re.sub(r"dev$", r"dev0", rs)
-
-    # if we have something like "b-2" or "a.2" at the end of the
-    # version, that is probably beta, alpha, etc
-    # let's remove the dash or dot
-    rs = re.sub(r"([abc|rc])[\-\.](\d+)$", r"\1\2", rs)
-
-    # 1.0-dev-r371 -> 1.0.dev371
-    # 0.1-dev-r79 -> 0.1.dev79
-    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
-
-    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
-    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
-
-    # Clean: v0.3, v1.0
-    if rs.startswith('v'):
-        rs = rs[1:]
-
-    # Clean leading '0's on numbers.
-    # TODO: unintended side-effect on, e.g., "2003.05.09"
-    # PyPI stats: 77 (~2%) better
-    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
-
-    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
-    # zero.
-    # PyPI stats: 245 (7.56%) better
-    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
-
-    # the 'dev-rNNN' tag is a dev tag
-    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
-
-    # clean the - when used as a pre delimiter
-    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
-
-    # a terminal "dev" or "devel" can be changed into ".dev0"
-    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
-
-    # a terminal "dev" can be changed into ".dev0"
-    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
-
-    # a terminal "final" or "stable" can be removed
-    rs = re.sub(r"(final|stable)$", "", rs)
-
-    # The 'r' and the '-' tags are post release tags
-    #   0.4a1.r10       ->  0.4a1.post10
-    #   0.9.33-17222    ->  0.9.3.post17222
-    #   0.9.33-r17222   ->  0.9.3.post17222
-    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
-
-    # Clean 'r' instead of 'dev' usage:
-    #   0.9.33+r17222   ->  0.9.3.dev17222
-    #   1.0dev123       ->  1.0.dev123
-    #   1.0.git123      ->  1.0.dev123
-    #   1.0.bzr123      ->  1.0.dev123
-    #   0.1a0dev.123    ->  0.1a0.dev123
-    # PyPI stats:  ~150 (~4%) better
-    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
-
-    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
-    #   0.2.pre1        ->  0.2c1
-    #   0.2-c1         ->  0.2c1
-    #   1.0preview123   ->  1.0c123
-    # PyPI stats: ~21 (0.62%) better
-    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
-
-    # Tcl/Tk uses "px" for their post release markers
-    rs = re.sub(r"p(\d+)$", r".post\1", rs)
-
-    try:
-        NormalizedVersion(rs)
-        return rs   # already rational
-    except IrrationalVersionError:
-        pass
-    return None
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/lib/webutils.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,668 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.lib.webutils
+~~~~~~~~~~~~~~~~~~~~~~
+
+Helper functions that may rely on the current WSGI request, exposed in the TG2
+thread-local "global" variables. It should have few dependencies so it can be
+imported anywhere - just like the global variables can be used everywhere.
+"""
+
+import datetime
+import json
+import logging
+import random
+import re
+
+from dateutil import relativedelta
+from tg import request, session
+from tg.i18n import ugettext as _
+from tg.i18n import ungettext
+from webhelpers2.html import HTML, escape, literal
+from webhelpers2.html.tags import NotGiven, Option, Options, _input
+from webhelpers2.html.tags import _make_safe_id_component as safeid
+from webhelpers2.html.tags import checkbox, end_form
+from webhelpers2.html.tags import form as insecure_form
+from webhelpers2.html.tags import hidden, link_to, password, radio
+from webhelpers2.html.tags import select as webhelpers2_select
+from webhelpers2.html.tags import submit, text, textarea
+from webhelpers2.number import format_byte_size
+from webhelpers2.text import chop_at, truncate, wrap_paragraphs
+
+import kallithea
+
+
+log = logging.getLogger(__name__)
+
+
+# mute pyflakes "imported but unused"
+assert Option
+assert checkbox
+assert chop_at
+assert end_form
+assert escape
+assert format_byte_size
+assert link_to
+assert literal
+assert password
+assert radio
+assert safeid
+assert submit
+assert text
+assert textarea
+assert truncate
+assert wrap_paragraphs
+
+
+#
+# General Kallithea URL handling
+#
+
+class UrlGenerator(object):
+    """Emulate pylons.url in providing a wrapper around routes.url
+
+    This code was added during migration from Pylons to Turbogears2. Pylons
+    already provided a wrapper like this, but Turbogears2 does not.
+
+    When the routing of Kallithea is changed to use less Routes and more
+    Turbogears2-style routing, this class may disappear or change.
+
+    url() (the __call__ method) returns the URL based on a route name and
+    arguments.
+    url.current() returns the URL of the current page with arguments applied.
+
+    Refer to documentation of Routes for details:
+    https://routes.readthedocs.io/en/latest/generating.html#generation
+    """
+    def __call__(self, *args, **kwargs):
+        return request.environ['routes.url'](*args, **kwargs)
+
+    def current(self, *args, **kwargs):
+        return request.environ['routes.url'].current(*args, **kwargs)
+
+
+url = UrlGenerator()
+
+
+def canonical_url(*args, **kargs):
+    '''Like url(x, qualified=True), but returns url that not only is qualified
+    but also canonical, as configured in canonical_url'''
+    try:
+        parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
+        kargs['host'] = parts[1]
+        kargs['protocol'] = parts[0]
+    except IndexError:
+        kargs['qualified'] = True
+    return url(*args, **kargs)
+
+
+def canonical_hostname():
+    '''Return canonical hostname of system'''
+    try:
+        parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
+        return parts[1].split('/', 1)[0]
+    except IndexError:
+        parts = url('home', qualified=True).split('://', 1)
+        return parts[1].split('/', 1)[0]
+
+
+#
+# Custom Webhelpers2 stuff
+#
+
+def html_escape(s):
+    """Return string with all html escaped.
+    This is also safe for javascript in html but not necessarily correct.
+    """
+    return (s
+        .replace('&', '&amp;')
+        .replace(">", "&gt;")
+        .replace("<", "&lt;")
+        .replace('"', "&quot;")
+        .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
+        )
+
+
+def reset(name, value, id=NotGiven, **attrs):
+    """Create a reset button, similar to webhelpers2.html.tags.submit ."""
+    return _input("reset", name, value, id, attrs)
+
+
+def select(name, selected_values, options, id=NotGiven, **attrs):
+    """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
+    if isinstance(options, list):
+        option_list = options
+        # Handle old value,label lists ... where value also can be value,label lists
+        options = Options()
+        for x in option_list:
+            if isinstance(x, tuple) and len(x) == 2:
+                value, label = x
+            elif isinstance(x, str):
+                value = label = x
+            else:
+                log.error('invalid select option %r', x)
+                raise
+            if isinstance(value, list):
+                og = options.add_optgroup(label)
+                for x in value:
+                    if isinstance(x, tuple) and len(x) == 2:
+                        group_value, group_label = x
+                    elif isinstance(x, str):
+                        group_value = group_label = x
+                    else:
+                        log.error('invalid select option %r', x)
+                        raise
+                    og.add_option(group_label, group_value)
+            else:
+                options.add_option(label, value)
+    return webhelpers2_select(name, selected_values, options, id=id, **attrs)
+
+
+session_csrf_secret_name = "_session_csrf_secret_token"
+
+def session_csrf_secret_token():
+    """Return (and create) the current session's CSRF protection token."""
+    if not session_csrf_secret_name in session:
+        session[session_csrf_secret_name] = str(random.getrandbits(128))
+        session.save()
+    return session[session_csrf_secret_name]
+
+def form(url, method="post", **attrs):
+    """Like webhelpers.html.tags.form , but automatically adding
+    session_csrf_secret_token for POST. The secret is thus never leaked in GET
+    URLs.
+    """
+    form = insecure_form(url, method, **attrs)
+    if method.lower() == 'get':
+        return form
+    return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
+
+
+#
+# Flash messages, stored in cookie
+#
+
+class _Message(object):
+    """A message returned by ``pop_flash_messages()``.
+
+    Converting the message to a string returns the message text. Instances
+    also have the following attributes:
+
+    * ``category``: the category specified when the message was created.
+    * ``message``: the html-safe message text.
+    """
+
+    def __init__(self, category, message):
+        self.category = category
+        self.message = message
+
+
+def _session_flash_messages(append=None, clear=False):
+    """Manage a message queue in tg.session: return the current message queue
+    after appending the given message, and possibly clearing the queue."""
+    key = 'flash'
+    if key in session:
+        flash_messages = session[key]
+    else:
+        if append is None:  # common fast path - also used for clearing empty queue
+            return []  # don't bother saving
+        flash_messages = []
+        session[key] = flash_messages
+    if append is not None and append not in flash_messages:
+        flash_messages.append(append)
+    if clear:
+        session.pop(key, None)
+    session.save()
+    return flash_messages
+
+
+def flash(message, category, logf=None):
+    """
+    Show a message to the user _and_ log it through the specified function
+
+    category: notice (default), warning, error, success
+    logf: a custom log function - such as log.debug
+
+    logf defaults to log.info, unless category equals 'success', in which
+    case logf defaults to log.debug.
+    """
+    assert category in ('error', 'success', 'warning'), category
+    if hasattr(message, '__html__'):
+        # render to HTML for storing in cookie
+        safe_message = str(message)
+    else:
+        # Apply str - the message might be an exception with __str__
+        # Escape, so we can trust the result without further escaping, without any risk of injection
+        safe_message = html_escape(str(message))
+    if logf is None:
+        logf = log.info
+        if category == 'success':
+            logf = log.debug
+
+    logf('Flash %s: %s', category, safe_message)
+
+    _session_flash_messages(append=(category, safe_message))
+
+
+def pop_flash_messages():
+    """Return all accumulated messages and delete them from the session.
+
+    The return value is a list of ``_Message`` objects.
+    """
+    return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
+
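# Illustrative sketch, not from the Kallithea source: the session-backed queue
# semantics of flash()/pop_flash_messages(), modelled with a plain dict standing
# in for tg.session - duplicate entries are stored once, and popping clears the
# queue. The message text is made up.
demo_session = {}

def demo_flash(category, message):
    queue = demo_session.setdefault('flash', [])
    if (category, message) not in queue:
        queue.append((category, message))

def demo_pop_flash():
    return demo_session.pop('flash', [])

demo_flash('success', 'Settings saved')
demo_flash('success', 'Settings saved')  # duplicate: stored only once
print(demo_pop_flash())                  # [('success', 'Settings saved')]
print(demo_pop_flash())                  # [] - the queue was cleared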
+
+#
+# Generic-ish formatting and markup
+#
+
+def js(value):
+    """Convert Python value to the corresponding JavaScript representation.
+
+    This is necessary to safely insert arbitrary values into HTML <script>
+    sections e.g. using Mako template expression substitution.
+
+    Note: Rather than using this function, it's preferable to avoid the
+    insertion of values into HTML <script> sections altogether. Instead,
+    data should (to the extent possible) be passed to JavaScript using
+    data attributes or AJAX calls, eliminating the need for JS specific
+    escaping.
+
+    Note: This is not safe for use in attributes (e.g. onclick), because
+    quotes are not escaped.
+
+    Because the rules for parsing <script> vary between XHTML (where
+    normal rules apply for any special characters) and HTML (where
+    entities are not interpreted, but the literal string "</script>"
+    is forbidden), the function ensures that the result never contains
+    '&', '<' and '>', thus making it safe in both those contexts (but
+    not in attributes).
+    """
+    return literal(
+        ('(' + json.dumps(value) + ')')
+        # In JSON, the following can only appear in string literals.
+        .replace('&', r'\x26')
+        .replace('<', r'\x3c')
+        .replace('>', r'\x3e')
+    )
+
+
+def jshtml(val):
+    """HTML escapes a string value, then converts the resulting string
+    to its corresponding JavaScript representation (see `js`).
+
+    This is used when a plain-text string (possibly containing special
+    HTML characters) will be used by a script in an HTML context (e.g.
+    element.innerHTML or jQuery's 'html' method).
+
+    If in doubt, err on the side of using `jshtml` over `js`, since it's
+    better to escape too much than too little.
+    """
+    return js(escape(val))
+
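# Illustrative sketch, not from the Kallithea source: the string transformation
# js() performs for a value containing characters that are dangerous inside a
# <script> block (the literal() wrapper is omitted here).
import json

def js_sketch(value):
    wrapped = '(' + json.dumps(value) + ')'
    return wrapped.replace('&', r'\x26').replace('<', r'\x3c').replace('>', r'\x3e')

print(js_sketch('</script><b>&amp;</b>'))
# prints: ("\x3c/script\x3e\x3cb\x3e\x26amp;\x3c/b\x3e")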
+
+url_re = re.compile(r'''\bhttps?://(?:[\da-zA-Z0-9@:.-]+)'''
+                    r'''(?:[/a-zA-Z0-9_=@#~&+%.,:;?!*()-]*[/a-zA-Z0-9_=@#~])?''')
+
+
+# Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
+# Check the char before @ - it must not look like we are inside an email address.
+# Matching is greedy so we don't have to look beyond the end.
+MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
+
+
+def extract_mentioned_usernames(text):
+    r"""
+    Returns list of (possible) usernames @mentioned in given text.
+
+    >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
+    ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz']
+    """
+    return MENTIONS_REGEX.findall(text)
+
+
+_URLIFY_RE = re.compile(r'''
+# URL markup
+(?P<url>%s) |
+# @mention markup
+(?P<mention>%s) |
+# Changeset hash markup
+(?<!\w|[-_])
+  (?P<hash>[0-9a-f]{12,40})
+(?!\w|[-_]) |
+# Markup of *bold text*
+(?:
+  (?:^|(?<=\s))
+  (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
+  (?![*\w])
+) |
+# "Stylize" markup
+\[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
+\[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
+\[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
+\[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
+\[(?P<tag>[a-z]+)\]
+''' % (url_re.pattern, MENTIONS_REGEX.pattern),
+    re.VERBOSE | re.MULTILINE | re.IGNORECASE)
+
+
+def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
+    """
+    Parse the given text message and return literal html with markup.
+    The text is truncated to the specified length, if any.
+    Hashes are turned into changeset links to the specified repository.
+    URLs are turned into links to what they say.
+    Issue references are linked to the configured issue server.
+    If link_ is provided, all text not already linking somewhere will link there.
+    >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
+    literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
+    """
+
+    def _replace(match_obj):
+        match_url = match_obj.group('url')
+        if match_url is not None:
+            return '<a href="%(url)s">%(url)s</a>' % {'url': match_url}
+        mention = match_obj.group('mention')
+        if mention is not None:
+            return '<b>%s</b>' % mention
+        hash_ = match_obj.group('hash')
+        if hash_ is not None and repo_name is not None:
+            return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
+                 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
+                 'hash': hash_,
+                }
+        bold = match_obj.group('bold')
+        if bold is not None:
+            return '<b>*%s*</b>' % _urlify(bold[1:-1])
+        if stylize:
+            seen = match_obj.group('seen')
+            if seen:
+                return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
+            license = match_obj.group('license')
+            if license:
+                return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
+            tagtype = match_obj.group('tagtype')
+            if tagtype:
+                tagvalue = match_obj.group('tagvalue')
+                return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
+            lang = match_obj.group('lang')
+            if lang:
+                return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
+            tag = match_obj.group('tag')
+            if tag:
+                return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
+        return match_obj.group(0)
+
+    def _urlify(s):
+        """
+        Extract URLs from the text and make HTML links out of them
+        """
+        return _URLIFY_RE.sub(_replace, s)
+
+    if truncate is None:
+        s = s.rstrip()
+    else:
+        s = truncatef(s, truncate, whole_word=True)
+    s = html_escape(s)
+    s = _urlify(s)
+    if repo_name is not None:
+        s = _urlify_issues(s, repo_name)
+    if link_ is not None:
+        # make href around everything that isn't a href already
+        s = _linkify_others(s, link_)
+    s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
+    # Turn HTML5 into more valid HTML4 as required by some mail readers.
+    # (This is not done in one step in html_escape, because character codes like
+    # &#123; risk being seen as an issue reference due to the presence of '#'.)
+    s = s.replace("&apos;", "&#39;")
+    return literal(s)
+
+
+def _linkify_others(t, l):
+    """Add a default link around html that may already contain links.
+    HTML doesn't allow nesting of links, so the default link is broken into
+    pieces that leave room for the existing links.
+    """
+    urls = re.compile(r'(\<a.*?\<\/a\>)',)
+    links = []
+    for e in urls.split(t):
+        if e.strip() and not urls.match(e):
+            links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
+        else:
+            links.append(e)
+    return ''.join(links)
+
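# Illustrative sketch, not from the Kallithea source: how _linkify_others()
# splits already-urlified HTML. Because the <a>...</a> pattern is a capturing
# group, re.split() keeps the existing links as separate list elements; only the
# remaining plain-text pieces are then wrapped in the default link. The HTML is
# made up.
import re

html = 'fixed <a href="http://example.com/">http://example.com/</a> in v1.2'
print(re.compile(r'(\<a.*?\<\/a\>)').split(html))
# ['fixed ', '<a href="http://example.com/">http://example.com/</a>', ' in v1.2']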
+
+# Global variable that will hold the actual _urlify_issues implementation.
+# It is set on first use, once the global configuration has been read.
+_urlify_issues_f = None
+
+
+def _urlify_issues(newtext, repo_name):
+    """Urlify issue references according to .ini configuration"""
+    global _urlify_issues_f
+    if _urlify_issues_f is None:
+        assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded
+
+        # Build chain of urlify functions, starting with not doing any transformation
+        def tmp_urlify_issues_f(s):
+            return s
+
+        issue_pat_re = re.compile(r'issue_pat(.*)')
+        for k in kallithea.CONFIG:
+            # Find all issue_pat* settings that also have corresponding issue_server_link and issue_sub configuration
+            m = issue_pat_re.match(k)
+            if m is None:
+                continue
+            suffix = m.group(1)
+            issue_pat = kallithea.CONFIG.get(k)
+            issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix)
+            issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix)
+            issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix)
+            if issue_prefix:
+                log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
+            if not issue_pat:
+                log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
+                continue
+            if not issue_server_link:
+                log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
+                continue
+            if issue_sub is None: # issue_sub can be empty but should be present
+                log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
+                continue
+
+            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexps) are bound
+            try:
+                issue_re = re.compile(issue_pat)
+            except re.error as e:
+                log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e))
+                continue
+
+            log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub)
+
+            def issues_replace(match_obj,
+                               issue_server_link=issue_server_link, issue_sub=issue_sub):
+                try:
+                    issue_url = match_obj.expand(issue_server_link)
+                except (IndexError, re.error) as e:
+                    log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
+                    issue_url = issue_server_link
+                issue_url = issue_url.replace('{repo}', repo_name)
+                issue_url = issue_url.replace('{repo_name}', repo_name.split(kallithea.URL_SEP)[-1])
+                # if issue_sub is empty use the matched issue reference verbatim
+                if not issue_sub:
+                    issue_text = match_obj.group()
+                else:
+                    try:
+                        issue_text = match_obj.expand(issue_sub)
+                    except (IndexError, re.error) as e:
+                        log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
+                        issue_text = match_obj.group()
+
+                return (
+                    '<a class="issue-tracker-link" href="%(url)s">'
+                    '%(text)s'
+                    '</a>'
+                    ) % {
+                     'url': issue_url,
+                     'text': issue_text,
+                    }
+
+            def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
+                return issue_re.sub(issues_replace, chain_f(s))
+
+        # Set tmp function globally - atomically
+        _urlify_issues_f = tmp_urlify_issues_f
+
+    return _urlify_issues_f(newtext)
+
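# Illustrative sketch, not from the Kallithea source: the kind of .ini settings
# _urlify_issues() consumes (the tracker URL and repo name below are made up),
# and the substitution it ends up performing. With an empty issue_sub the matched
# reference is kept verbatim as the link text.
#
#     issue_pat = #(\d+)
#     issue_server_link = https://issues.example.com/{repo}/issue/\1
#     issue_sub =
#
import re

issue_re = re.compile(r'#(\d+)')

def link_issue(match):
    url = match.expand(r'https://issues.example.com/{repo}/issue/\1')
    url = url.replace('{repo}', 'group/repo')
    return '<a class="issue-tracker-link" href="%s">%s</a>' % (url, match.group())

print(issue_re.sub(link_issue, 'fixes #42'))
# fixes <a class="issue-tracker-link" href="https://issues.example.com/group/repo/issue/42">#42</a>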
+
+def render_w_mentions(source, repo_name=None):
+    """
+    Render plain text with revision hashes and issue references urlified
+    and with @mention highlighting.
+    """
+    s = urlify_text(source, repo_name=repo_name)
+    return literal('<div class="formatted-fixed">%s</div>' % s)
+
+
+#
+# Simple filters
+#
+
+def shorter(s, size=20, firstline=False, postfix='...'):
+    """Truncate s to size, including the postfix string if truncating.
+    If firstline, only the first line (up to the first newline) is considered.
+    """
+    if firstline:
+        s = s.split('\n', 1)[0].rstrip()
+    if len(s) > size:
+        return s[:size - len(postfix)] + postfix
+    return s
+
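# Illustrative sketch, not from the Kallithea source: the truncation arithmetic
# used by shorter() - the postfix counts against the size budget. The message is
# made up.
s, size, postfix = 'A rather long commit message', 20, '...'
print(s[:size - len(postfix)] + postfix)  # 'A rather long com...' (exactly 20 chars)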
+
+def age(prevdate, show_short_version=False, now=None):
+    """
+    Turn a datetime into an age string.
+    If show_short_version is True, a less accurate but shorter string is
+    generated, for example '2 days ago' instead of '2 days and 23 hours ago'.
+
+    :param prevdate: datetime object
+    :param show_short_version: if it should approximate the age and return a shorter string
+    :rtype: str
+    :returns: a string describing the age
+    """
+    now = now or datetime.datetime.now()
+    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
+    deltas = {}
+    future = False
+
+    if prevdate > now:
+        now, prevdate = prevdate, now
+        future = True
+    if future:
+        prevdate = prevdate.replace(microsecond=0)
+    # Get date parts deltas
+    for part in order:
+        d = relativedelta.relativedelta(now, prevdate)
+        deltas[part] = getattr(d, part + 's')
+
+    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
+    # not 1 hour, -59 minutes and -59 seconds)
+    for num, length in [(5, 60), (4, 60), (3, 24)]:  # seconds, minutes, hours
+        part = order[num]
+        carry_part = order[num - 1]
+
+        if deltas[part] < 0:
+            deltas[part] += length
+            deltas[carry_part] -= 1
+
+    # Same thing for days except that the increment depends on the (variable)
+    # number of days in the month
+    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
+    if deltas['day'] < 0:
+        if prevdate.month == 2 and (prevdate.year % 4 == 0 and
+            (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
+        ):
+            deltas['day'] += 29
+        else:
+            deltas['day'] += month_lengths[prevdate.month - 1]
+
+        deltas['month'] -= 1
+
+    if deltas['month'] < 0:
+        deltas['month'] += 12
+        deltas['year'] -= 1
+
+    # In short version, we want nicer handling of ages of more than a year
+    if show_short_version:
+        if deltas['year'] == 1:
+            # ages between 1 and 2 years: show as months
+            deltas['month'] += 12
+            deltas['year'] = 0
+        if deltas['year'] >= 2:
+            # ages 2+ years: round
+            if deltas['month'] > 6:
+                deltas['year'] += 1
+                deltas['month'] = 0
+
+    # Format the result
+    fmt_funcs = {
+        'year': lambda d: ungettext('%d year', '%d years', d) % d,
+        'month': lambda d: ungettext('%d month', '%d months', d) % d,
+        'day': lambda d: ungettext('%d day', '%d days', d) % d,
+        'hour': lambda d: ungettext('%d hour', '%d hours', d) % d,
+        'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d,
+        'second': lambda d: ungettext('%d second', '%d seconds', d) % d,
+    }
+
+    for i, part in enumerate(order):
+        value = deltas[part]
+        if value == 0:
+            continue
+
+        if i < 5:
+            sub_part = order[i + 1]
+            sub_value = deltas[sub_part]
+        else:
+            sub_value = 0
+
+        if sub_value == 0 or show_short_version:
+            if future:
+                return _('in %s') % fmt_funcs[part](value)
+            else:
+                return _('%s ago') % fmt_funcs[part](value)
+        if future:
+            return _('in %s and %s') % (fmt_funcs[part](value),
+                fmt_funcs[sub_part](sub_value))
+        else:
+            return _('%s and %s ago') % (fmt_funcs[part](value),
+                fmt_funcs[sub_part](sub_value))
+
+    return _('just now')
+
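# Illustrative sketch, not from the Kallithea source: the per-unit relativedelta
# attributes that age() reads, for a timestamp 2 days and 23 hours in the past.
# The full version renders this as '2 days and 23 hours ago'; with
# show_short_version=True only the leading unit is kept: '2 days ago'. The
# timestamps are made up.
import datetime

from dateutil import relativedelta

now = datetime.datetime(2021, 5, 27, 12, 0, 0)
prevdate = now - datetime.timedelta(days=2, hours=23)
d = relativedelta.relativedelta(now, prevdate)
print(d.years, d.months, d.days, d.hours, d.minutes)  # 0 0 2 23 0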
+
+def fmt_date(date):
+    if date:
+        return date.strftime("%Y-%m-%d %H:%M:%S")
+    return ""
+
+
+def capitalize(x):
+    return x.capitalize()
+
+
+def short_id(x):
+    return x[:12]
--- a/kallithea/model/api_key.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/api_key.py	Thu May 27 21:27:37 2021 +0200
@@ -29,8 +29,7 @@
 import time
 
 from kallithea.lib.utils2 import generate_api_key
-from kallithea.model.db import User, UserApiKeys
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 
 
 log = logging.getLogger(__name__)
@@ -44,14 +43,14 @@
         :param description: description of ApiKey
         :param lifetime: expiration time in seconds
         """
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
-        new_api_key = UserApiKeys()
+        new_api_key = db.UserApiKeys()
         new_api_key.api_key = generate_api_key()
         new_api_key.user_id = user.user_id
         new_api_key.description = description
         new_api_key.expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
-        Session().add(new_api_key)
+        meta.Session().add(new_api_key)
 
         return new_api_key
 
@@ -60,19 +59,19 @@
         Deletes given api_key, if user is set it also filters the object for
         deletion by given user.
         """
-        api_key = UserApiKeys.query().filter(UserApiKeys.api_key == api_key)
+        api_key = db.UserApiKeys.query().filter(db.UserApiKeys.api_key == api_key)
 
         if user is not None:
-            user = User.guess_instance(user)
-            api_key = api_key.filter(UserApiKeys.user_id == user.user_id)
+            user = db.User.guess_instance(user)
+            api_key = api_key.filter(db.UserApiKeys.user_id == user.user_id)
 
         api_key = api_key.scalar()
-        Session().delete(api_key)
+        meta.Session().delete(api_key)
 
     def get_api_keys(self, user, show_expired=True):
-        user = User.guess_instance(user)
-        user_api_keys = UserApiKeys.query() \
-            .filter(UserApiKeys.user_id == user.user_id)
+        user = db.User.guess_instance(user)
+        user_api_keys = db.UserApiKeys.query() \
+            .filter(db.UserApiKeys.user_id == user.user_id)
         if not show_expired:
             user_api_keys = user_api_keys.filter_by(is_expired=False)
         return user_api_keys
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/model/async_tasks.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,233 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.model.async_tasks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Kallithea task modules, containing all tasks that are supposed to be run
+by the celery daemon
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Oct 6, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+import os
+import traceback
+from collections import OrderedDict
+from operator import itemgetter
+from time import mktime
+
+import celery.utils.log
+from tg import config
+
+import kallithea
+from kallithea.lib import celerylib, conf, ext_json
+from kallithea.lib.utils2 import asbool, ascii_bytes
+from kallithea.lib.vcs.utils import author_email, author_name
+from kallithea.model import db, meta
+
+
+__all__ = ['get_commits_stats']
+
+
+log = celery.utils.log.get_task_logger(__name__)
+
+
+def _author_username(author):
+    """Return the username of the user identified by the email part of the 'author' string,
+    defaulting to the name or the email.
+    Kind of similar to h.person()."""
+    email = author_email(author)
+    if email:
+        user = db.User.get_by_email(email)
+        if user is not None:
+            return user.username
+    # Still nothing?  Just pass back the author name if any, else the email
+    return author_name(author) or email
+
+
+@celerylib.task
+def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
+    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
+                            ts_max_y)
+    log.info('running task with lockkey %s', lockkey)
+    try:
+        lock = celerylib.DaemonLock(os.path.join(config['cache_dir'], lockkey))
+
+        co_day_auth_aggr = {}
+        commits_by_day_aggregate = {}
+        db_repo = db.Repository.get_by_repo_name(repo_name)
+        if db_repo is None:
+            return
+
+        scm_repo = db_repo.scm_instance
+        repo_size = scm_repo.count()
+        # return if the repo has no revisions
+        if repo_size < 1:
+            lock.release()
+            return
+
+        skip_date_limit = True
+        parse_limit = int(config.get('commit_parse_limit'))
+        last_rev = None
+        last_cs = None
+        timegetter = itemgetter('time')
+
+        dbrepo = db.Repository.query() \
+            .filter(db.Repository.repo_name == repo_name).scalar()
+        cur_stats = db.Statistics.query() \
+            .filter(db.Statistics.repository == dbrepo).scalar()
+
+        if cur_stats is not None:
+            last_rev = cur_stats.stat_on_revision
+
+        if last_rev == scm_repo.get_changeset().revision and repo_size > 1:
+            # pass silently without any work if the repo has more than one revision
+            # and the last parsed revision (from the db marker) already is the
+            # last revision
+            lock.release()
+            return
+
+        if cur_stats:
+            commits_by_day_aggregate = OrderedDict(ext_json.loads(
+                                        cur_stats.commit_activity_combined))
+            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
+
+        log.debug('starting parsing %s', parse_limit)
+
+        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
+        log.debug('Getting revisions from %s to %s',
+             last_rev, last_rev + parse_limit
+        )
+        usernames_cache = {}
+        for cs in scm_repo[last_rev:last_rev + parse_limit]:
+            log.debug('parsing %s', cs)
+            last_cs = cs  # remember last parsed changeset
+            tt = cs.date.timetuple()
+            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
+
+            # get username from author - similar to what h.person does
+            username = usernames_cache.get(cs.author)
+            if username is None:
+                username = _author_username(cs.author)
+                usernames_cache[cs.author] = username
+
+            if username in co_day_auth_aggr:
+                try:
+                    l = [timegetter(x) for x in
+                         co_day_auth_aggr[username]['data']]
+                    time_pos = l.index(k)
+                except ValueError:
+                    time_pos = None
+
+                if time_pos is not None and time_pos >= 0:
+                    datadict = \
+                        co_day_auth_aggr[username]['data'][time_pos]
+
+                    datadict["commits"] += 1
+                    datadict["added"] += len(cs.added)
+                    datadict["changed"] += len(cs.changed)
+                    datadict["removed"] += len(cs.removed)
+
+                else:
+                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
+
+                        datadict = {"time": k,
+                                    "commits": 1,
+                                    "added": len(cs.added),
+                                    "changed": len(cs.changed),
+                                    "removed": len(cs.removed),
+                                   }
+                        co_day_auth_aggr[username]['data'] \
+                            .append(datadict)
+
+            else:
+                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
+                    co_day_auth_aggr[username] = {
+                                        "label": username,
+                                        "data": [{"time": k,
+                                                 "commits": 1,
+                                                 "added": len(cs.added),
+                                                 "changed": len(cs.changed),
+                                                 "removed": len(cs.removed),
+                                                 }],
+                                        "schema": ["commits"],
+                                        }
+
+            # gather all data by day
+            if k in commits_by_day_aggregate:
+                commits_by_day_aggregate[k] += 1
+            else:
+                commits_by_day_aggregate[k] = 1
+
+        overview_data = sorted(commits_by_day_aggregate.items(),
+                               key=itemgetter(0))
+
+        stats = cur_stats if cur_stats else db.Statistics()
+        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
+        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
+
+        log.debug('last revision %s', last_rev)
+        leftovers = len(scm_repo.revisions[last_rev:])
+        log.debug('revisions to parse %s', leftovers)
+
+        if last_rev == 0 or leftovers < parse_limit:
+            log.debug('getting code trending stats')
+            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
+
+        try:
+            stats.repository = dbrepo
+            stats.stat_on_revision = last_cs.revision if last_cs else 0
+            meta.Session().add(stats)
+            meta.Session().commit()
+        except:
+            log.error(traceback.format_exc())
+            meta.Session().rollback()
+            lock.release()
+            return
+
+        # final release
+        lock.release()
+
+        # execute another task if celery is enabled
+        if len(scm_repo.revisions) > 1 and asbool(kallithea.CONFIG.get('use_celery')) and recurse_limit > 0:
+            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
+        elif recurse_limit <= 0:
+            log.debug('Not recursing - limit has been reached')
+        else:
+            log.debug('Not recursing')
+    except celerylib.LockHeld:
+        log.info('Task with key %s already running', lockkey)
+
+
+def __get_codes_stats(repo_name):
+    scm_repo = db.Repository.get_by_repo_name(repo_name).scm_instance
+
+    tip = scm_repo.get_changeset()
+    code_stats = {}
+
+    for _topnode, _dirnodes, filenodes in tip.walk('/'):
+        for filenode in filenodes:
+            ext = filenode.extension.lower()
+            if ext in conf.LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
+                if ext in code_stats:
+                    code_stats[ext] += 1
+                else:
+                    code_stats[ext] = 1
+
+    return code_stats or {}
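# Illustrative sketch, not from the Kallithea source: the shape of the aggregates
# get_commits_stats() builds before JSON-encoding them into db.Statistics.
# Usernames, counts and timestamps below are made up.
example_commit_activity = {
    'alice': {
        'label': 'alice',
        'data': [
            # one entry per day (midnight timestamp) on which the user committed
            {'time': 1622073600.0, 'commits': 3, 'added': 5, 'changed': 2, 'removed': 1},
        ],
        'schema': ['commits'],
    },
}
example_commit_activity_combined = [
    # (day timestamp, total commits that day), sorted by day
    (1622073600.0, 4),
]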
--- a/kallithea/model/changeset_status.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/changeset_status.py	Thu May 27 21:27:37 2021 +0200
@@ -29,7 +29,7 @@
 
 from sqlalchemy.orm import joinedload
 
-from kallithea.model.db import ChangesetStatus, PullRequest, Repository, Session, User
+from kallithea.model import db, meta
 
 
 log = logging.getLogger(__name__)
@@ -39,22 +39,22 @@
 
     def _get_status_query(self, repo, revision, pull_request,
                           with_revisions=False):
-        repo = Repository.guess_instance(repo)
+        repo = db.Repository.guess_instance(repo)
 
-        q = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == repo)
+        q = db.ChangesetStatus.query() \
+            .filter(db.ChangesetStatus.repo == repo)
         if not with_revisions:
             # only report the latest vote across all users! TODO: be smarter!
-            q = q.filter(ChangesetStatus.version == 0)
+            q = q.filter(db.ChangesetStatus.version == 0)
 
         if revision:
-            q = q.filter(ChangesetStatus.revision == revision)
+            q = q.filter(db.ChangesetStatus.revision == revision)
         elif pull_request:
-            pull_request = PullRequest.guess_instance(pull_request)
-            q = q.filter(ChangesetStatus.pull_request == pull_request)
+            pull_request = db.PullRequest.guess_instance(pull_request)
+            q = q.filter(db.ChangesetStatus.pull_request == pull_request)
         else:
             raise Exception('Please specify revision or pull_request')
-        q = q.order_by(ChangesetStatus.version.asc())
+        q = q.order_by(db.ChangesetStatus.version.asc())
         return q
 
     def _calculate_status(self, statuses):
@@ -64,15 +64,15 @@
         """
 
         if not statuses:
-            return ChangesetStatus.STATUS_UNDER_REVIEW
+            return db.ChangesetStatus.STATUS_UNDER_REVIEW
 
-        if all(st and st.status == ChangesetStatus.STATUS_APPROVED for st in statuses):
-            return ChangesetStatus.STATUS_APPROVED
+        if all(st and st.status == db.ChangesetStatus.STATUS_APPROVED for st in statuses):
+            return db.ChangesetStatus.STATUS_APPROVED
 
-        if any(st and st.status == ChangesetStatus.STATUS_REJECTED for st in statuses):
-            return ChangesetStatus.STATUS_REJECTED
+        if any(st and st.status == db.ChangesetStatus.STATUS_REJECTED for st in statuses):
+            return db.ChangesetStatus.STATUS_REJECTED
 
-        return ChangesetStatus.STATUS_UNDER_REVIEW
+        return db.ChangesetStatus.STATUS_UNDER_REVIEW
 
     def calculate_pull_request_result(self, pull_request):
         """
@@ -94,9 +94,9 @@
         for user in pull_request.get_reviewer_users():
             st = cs_statuses.get(user.username)
             relevant_statuses.append(st)
-            status = ChangesetStatus.STATUS_NOT_REVIEWED if st is None else st.status
-            if status in (ChangesetStatus.STATUS_NOT_REVIEWED,
-                          ChangesetStatus.STATUS_UNDER_REVIEW):
+            status = db.ChangesetStatus.STATUS_NOT_REVIEWED if st is None else st.status
+            if status in (db.ChangesetStatus.STATUS_NOT_REVIEWED,
+                          db.ChangesetStatus.STATUS_UNDER_REVIEW):
                 pull_request_pending_reviewers.append(user)
             pull_request_reviewers.append((user, status))
 
@@ -130,7 +130,7 @@
         # returned from pull_request
         status = q.first()
         if as_str:
-            return str(status.status) if status else ChangesetStatus.DEFAULT
+            return str(status.status) if status else db.ChangesetStatus.DEFAULT
         return status
 
     def set_status(self, repo, status, user, comment, revision=None,
@@ -146,20 +146,20 @@
         :param revision:
         :param pull_request:
         """
-        repo = Repository.guess_instance(repo)
+        repo = db.Repository.guess_instance(repo)
 
-        q = ChangesetStatus.query()
+        q = db.ChangesetStatus.query()
         if revision is not None:
             assert pull_request is None
-            q = q.filter(ChangesetStatus.repo == repo)
-            q = q.filter(ChangesetStatus.revision == revision)
+            q = q.filter(db.ChangesetStatus.repo == repo)
+            q = q.filter(db.ChangesetStatus.revision == revision)
             revisions = [revision]
         else:
             assert pull_request is not None
-            pull_request = PullRequest.guess_instance(pull_request)
+            pull_request = db.PullRequest.guess_instance(pull_request)
             repo = pull_request.org_repo
-            q = q.filter(ChangesetStatus.repo == repo)
-            q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
+            q = q.filter(db.ChangesetStatus.repo == repo)
+            q = q.filter(db.ChangesetStatus.revision.in_(pull_request.revisions))
             revisions = pull_request.revisions
         cur_statuses = q.all()
 
@@ -169,14 +169,14 @@
 
         new_statuses = []
         for rev in revisions:
-            new_status = ChangesetStatus()
+            new_status = db.ChangesetStatus()
             new_status.version = 0 # default
-            new_status.author = User.guess_instance(user)
-            new_status.repo = Repository.guess_instance(repo)
+            new_status.author = db.User.guess_instance(user)
+            new_status.repo = db.Repository.guess_instance(repo)
             new_status.status = status
             new_status.comment = comment
             new_status.revision = rev
             new_status.pull_request = pull_request
             new_statuses.append(new_status)
-            Session().add(new_status)
+            meta.Session().add(new_status)
         return new_statuses
--- a/kallithea/model/comment.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/comment.py	Thu May 27 21:27:37 2021 +0200
@@ -28,67 +28,50 @@
 import logging
 from collections import defaultdict
 
-from tg.i18n import ugettext as _
-
-from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import extract_mentioned_users
-from kallithea.model.db import ChangesetComment, PullRequest, Repository, User
-from kallithea.model.meta import Session
-from kallithea.model.notification import NotificationModel
+from kallithea.lib import webutils
+from kallithea.lib.utils import extract_mentioned_users
+from kallithea.model import db, meta, notification
 
 
 log = logging.getLogger(__name__)
 
 
 def _list_changeset_commenters(revision):
-    return (Session().query(User)
-        .join(ChangesetComment.author)
-        .filter(ChangesetComment.revision == revision)
+    return (meta.Session().query(db.User)
+        .join(db.ChangesetComment.author)
+        .filter(db.ChangesetComment.revision == revision)
         .all())
 
 def _list_pull_request_commenters(pull_request):
-    return (Session().query(User)
-        .join(ChangesetComment.author)
-        .filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
+    return (meta.Session().query(db.User)
+        .join(db.ChangesetComment.author)
+        .filter(db.ChangesetComment.pull_request_id == pull_request.pull_request_id)
         .all())
 
 
 class ChangesetCommentsModel(object):
 
-    def _get_notification_data(self, repo, comment, author, comment_text,
-                               line_no=None, revision=None, pull_request=None,
-                               status_change=None, closing_pr=False):
-        """
-        :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
-        """
-        # make notification
-        body = comment_text  # text of the comment
-        line = ''
-        if line_no:
-            line = _('on line %s') % line_no
+    def create_notification(self, repo, comment, author, comment_text,
+                                line_no=None, revision=None, pull_request=None,
+                                status_change=None, closing_pr=False):
 
         # changeset
         if revision:
-            notification_type = NotificationModel.TYPE_CHANGESET_COMMENT
+            notification_type = notification.NotificationModel.TYPE_CHANGESET_COMMENT
             cs = repo.scm_instance.get_changeset(revision)
-            desc = cs.short_id
 
-            threading = ['%s-rev-%s@%s' % (repo.repo_name, revision, h.canonical_hostname())]
+            threading = ['%s-rev-%s@%s' % (repo.repo_name, revision, webutils.canonical_hostname())]
             if line_no: # TODO: url to file _and_ line number
                 threading.append('%s-rev-%s-line-%s@%s' % (repo.repo_name, revision, line_no,
-                                                           h.canonical_hostname()))
-            comment_url = h.canonical_url('changeset_home',
+                                                           webutils.canonical_hostname()))
+            comment_url = webutils.canonical_url('changeset_home',
                 repo_name=repo.repo_name,
                 revision=revision,
                 anchor='comment-%s' % comment.comment_id)
-            subj = h.link_to(
-                'Re changeset: %(desc)s %(line)s' %
-                          {'desc': desc, 'line': line},
-                 comment_url)
             # get the current participants of this changeset
             recipients = _list_changeset_commenters(revision)
             # add changeset author if it's known locally
-            cs_author = User.get_from_cs_author(cs.author)
+            cs_author = db.User.get_from_cs_author(cs.author)
             if not cs_author:
                 # use repo owner if we cannot extract the author correctly
                 # FIXME: just use committer name even if not a user
@@ -98,40 +81,32 @@
             email_kwargs = {
                 'status_change': status_change,
                 'cs_comment_user': author.full_name_and_username,
-                'cs_target_repo': h.canonical_url('summary_home', repo_name=repo.repo_name),
+                'cs_target_repo': webutils.canonical_url('summary_home', repo_name=repo.repo_name),
                 'cs_comment_url': comment_url,
-                'cs_url': h.canonical_url('changeset_home', repo_name=repo.repo_name, revision=revision),
-                'raw_id': revision,
+                'cs_url': webutils.canonical_url('changeset_home', repo_name=repo.repo_name, revision=revision),
                 'message': cs.message,
-                'message_short': h.shorter(cs.message, 50, firstline=True),
+                'message_short': webutils.shorter(cs.message, 50, firstline=True),
                 'cs_author': cs_author,
+                'cs_author_username': cs_author.username,
                 'repo_name': repo.repo_name,
-                'short_id': h.short_id(revision),
+                'short_id': revision[:12],
                 'branch': cs.branch,
-                'comment_username': author.username,
                 'threading': threading,
             }
         # pull request
         elif pull_request:
-            notification_type = NotificationModel.TYPE_PULL_REQUEST_COMMENT
-            desc = comment.pull_request.title
+            notification_type = notification.NotificationModel.TYPE_PULL_REQUEST_COMMENT
             _org_ref_type, org_ref_name, _org_rev = comment.pull_request.org_ref.split(':')
             _other_ref_type, other_ref_name, _other_rev = comment.pull_request.other_ref.split(':')
             threading = ['%s-pr-%s@%s' % (pull_request.other_repo.repo_name,
                                           pull_request.pull_request_id,
-                                          h.canonical_hostname())]
+                                          webutils.canonical_hostname())]
             if line_no: # TODO: url to file _and_ line number
                 threading.append('%s-pr-%s-line-%s@%s' % (pull_request.other_repo.repo_name,
                                                           pull_request.pull_request_id, line_no,
-                                                          h.canonical_hostname()))
+                                                          webutils.canonical_hostname()))
             comment_url = pull_request.url(canonical=True,
                 anchor='comment-%s' % comment.comment_id)
-            subj = h.link_to(
-                'Re pull request %(pr_nice_id)s: %(desc)s %(line)s' %
-                          {'desc': desc,
-                           'pr_nice_id': comment.pull_request.nice_id(),
-                           'line': line},
-                comment_url)
             # get the current participants of this pull request
             recipients = _list_pull_request_commenters(pull_request)
             recipients.append(pull_request.owner)
@@ -140,27 +115,43 @@
             # set some variables for email notification
             email_kwargs = {
                 'pr_title': pull_request.title,
-                'pr_title_short': h.shorter(pull_request.title, 50),
+                'pr_title_short': webutils.shorter(pull_request.title, 50),
                 'pr_nice_id': pull_request.nice_id(),
                 'status_change': status_change,
                 'closing_pr': closing_pr,
                 'pr_comment_url': comment_url,
                 'pr_url': pull_request.url(canonical=True),
                 'pr_comment_user': author.full_name_and_username,
-                'pr_target_repo': h.canonical_url('summary_home',
+                'pr_target_repo': webutils.canonical_url('summary_home',
                                    repo_name=pull_request.other_repo.repo_name),
                 'pr_target_branch': other_ref_name,
-                'pr_source_repo': h.canonical_url('summary_home',
+                'pr_source_repo': webutils.canonical_url('summary_home',
                                    repo_name=pull_request.org_repo.repo_name),
                 'pr_source_branch': org_ref_name,
                 'pr_owner': pull_request.owner,
                 'pr_owner_username': pull_request.owner.username,
                 'repo_name': pull_request.other_repo.repo_name,
-                'comment_username': author.username,
                 'threading': threading,
             }
 
-        return subj, body, recipients, notification_type, email_kwargs
+        email_kwargs['is_mention'] = False
+        # create notification objects, and emails
+        notification.NotificationModel().create(
+            created_by=author, body=comment_text,
+            recipients=recipients, type_=notification_type,
+            email_kwargs=email_kwargs,
+        )
+
+        mention_recipients = extract_mentioned_users(comment_text).difference(recipients)
+        if mention_recipients:
+            email_kwargs['is_mention'] = True
+            notification.NotificationModel().create(
+                created_by=author, body=comment_text,
+                recipients=mention_recipients,
+                type_=notification_type,
+                email_kwargs=email_kwargs
+            )
+
 
     def create(self, text, repo, author, revision=None, pull_request=None,
                f_path=None, line_no=None, status_change=None, closing_pr=False,
@@ -175,9 +166,9 @@
             log.warning('Missing text for comment, skipping...')
             return None
 
-        repo = Repository.guess_instance(repo)
-        author = User.guess_instance(author)
-        comment = ChangesetComment()
+        repo = db.Repository.guess_instance(repo)
+        author = db.User.guess_instance(author)
+        comment = db.ChangesetComment()
         comment.repo = repo
         comment.author = author
         comment.text = text
@@ -187,49 +178,25 @@
         if revision is not None:
             comment.revision = revision
         elif pull_request is not None:
-            pull_request = PullRequest.guess_instance(pull_request)
+            pull_request = db.PullRequest.guess_instance(pull_request)
             comment.pull_request = pull_request
         else:
             raise Exception('Please specify revision or pull_request_id')
 
-        Session().add(comment)
-        Session().flush()
+        meta.Session().add(comment)
+        meta.Session().flush()
 
         if send_email:
-            (subj, body, recipients, notification_type,
-             email_kwargs) = self._get_notification_data(
-                                repo, comment, author,
-                                comment_text=text,
-                                line_no=line_no,
-                                revision=revision,
-                                pull_request=pull_request,
-                                status_change=status_change,
-                                closing_pr=closing_pr)
-            email_kwargs['is_mention'] = False
-            # create notification objects, and emails
-            NotificationModel().create(
-                created_by=author, subject=subj, body=body,
-                recipients=recipients, type_=notification_type,
-                email_kwargs=email_kwargs,
+            self.create_notification(
+                repo, comment, author, text, line_no, revision, pull_request,
+                status_change, closing_pr
             )
 
-            mention_recipients = extract_mentioned_users(body).difference(recipients)
-            if mention_recipients:
-                email_kwargs['is_mention'] = True
-                subj = _('[Mention]') + ' ' + subj
-                # FIXME: this subject is wrong and unused!
-                NotificationModel().create(
-                    created_by=author, subject=subj, body=body,
-                    recipients=mention_recipients,
-                    type_=notification_type,
-                    email_kwargs=email_kwargs
-                )
-
         return comment
 
     def delete(self, comment):
-        comment = ChangesetComment.guess_instance(comment)
-        Session().delete(comment)
+        comment = db.ChangesetComment.guess_instance(comment)
+        meta.Session().delete(comment)
 
         return comment
 
@@ -269,34 +236,34 @@
         if inline is None and f_path is not None:
             raise Exception("f_path only makes sense for inline comments.")
 
-        q = Session().query(ChangesetComment)
+        q = meta.Session().query(db.ChangesetComment)
 
         if inline:
             if f_path is not None:
                 # inline comments for a given file...
-                q = q.filter(ChangesetComment.f_path == f_path)
+                q = q.filter(db.ChangesetComment.f_path == f_path)
                 if line_no is None:
                     # ... on any line
-                    q = q.filter(ChangesetComment.line_no != None)
+                    q = q.filter(db.ChangesetComment.line_no != None)
                 else:
                     # ... on specific line
-                    q = q.filter(ChangesetComment.line_no == line_no)
+                    q = q.filter(db.ChangesetComment.line_no == line_no)
             else:
                 # all inline comments
-                q = q.filter(ChangesetComment.line_no != None) \
-                    .filter(ChangesetComment.f_path != None)
+                q = q.filter(db.ChangesetComment.line_no != None) \
+                    .filter(db.ChangesetComment.f_path != None)
         else:
             # all general comments
-            q = q.filter(ChangesetComment.line_no == None) \
-                .filter(ChangesetComment.f_path == None)
+            q = q.filter(db.ChangesetComment.line_no == None) \
+                .filter(db.ChangesetComment.f_path == None)
 
         if revision is not None:
-            q = q.filter(ChangesetComment.revision == revision) \
-                .filter(ChangesetComment.repo_id == repo_id)
+            q = q.filter(db.ChangesetComment.revision == revision) \
+                .filter(db.ChangesetComment.repo_id == repo_id)
         elif pull_request is not None:
-            pull_request = PullRequest.guess_instance(pull_request)
-            q = q.filter(ChangesetComment.pull_request == pull_request)
+            pull_request = db.PullRequest.guess_instance(pull_request)
+            q = q.filter(db.ChangesetComment.pull_request == pull_request)
         else:
             raise Exception('Please specify either revision or pull_request')
 
-        return q.order_by(ChangesetComment.created_on).all()
+        return q.order_by(db.ChangesetComment.created_on).all()
--- a/kallithea/model/db.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/db.py	Thu May 27 21:27:37 2021 +0200
@@ -37,6 +37,7 @@
 
 import ipaddr
 import sqlalchemy
+import urlobject
 from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
@@ -44,27 +45,22 @@
 from webob.exc import HTTPNotFound
 
 import kallithea
-from kallithea.lib import ext_json
+from kallithea.lib import ext_json, ssh, webutils
 from kallithea.lib.exceptions import DefaultUserException
-from kallithea.lib.utils2 import (Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool,
-                                  urlreadable)
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.model.meta import Base, Session
+from kallithea.lib.utils2 import (asbool, ascii_bytes, aslist, check_git_version, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int,
+                                  safe_str, urlreadable)
+from kallithea.lib.vcs import get_repo
+from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
+from kallithea.lib.vcs.utils import author_email, author_name
+from kallithea.model import meta
 
 
-URL_SEP = '/'
 log = logging.getLogger(__name__)
 
 #==============================================================================
 # BASE CLASSES
 #==============================================================================
 
-def _hash_key(k):
-    return hashlib.md5(safe_bytes(k)).hexdigest()
-
-
 class BaseDbModel(object):
     """
     Base Model for all classes
@@ -113,7 +109,7 @@
 
     @classmethod
     def query(cls):
-        return Session().query(cls)
+        return meta.Session().query(cls)
 
     @classmethod
     def get(cls, id_):
@@ -163,7 +159,7 @@
     @classmethod
     def delete(cls, id_):
         obj = cls.query().get(id_)
-        Session().delete(obj)
+        meta.Session().delete(obj)
 
     def __repr__(self):
         return '<DB:%s>' % (self.__class__.__name__)
@@ -171,11 +167,10 @@
 
 _table_args_default_dict = {'extend_existing': True,
                             'mysql_engine': 'InnoDB',
-                            'mysql_charset': 'utf8',
                             'sqlite_autoincrement': True,
                            }
 
-class Setting(Base, BaseDbModel):
+class Setting(meta.Base, BaseDbModel):
     __tablename__ = 'settings'
     __table_args__ = (
         _table_args_default_dict,
@@ -185,10 +180,9 @@
         'str': safe_bytes,
         'int': safe_int,
         'unicode': safe_str,
-        'bool': str2bool,
+        'bool': asbool,
         'list': functools.partial(aslist, sep=',')
     }
-    DEFAULT_UPDATE_URL = ''
 
     app_settings_id = Column(Integer(), primary_key=True)
     app_settings_name = Column(String(255), nullable=False, unique=True)
@@ -249,10 +243,10 @@
         return res
 
     @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
+    def create_or_update(cls, key, val=None, type=None):
         """
         Creates or updates Kallithea setting. If updates are triggered, it will only
-        update parameters that are explicitly set. Optional instance will be skipped.
+        update parameters that are explicitly set. 'None' values will be skipped.
 
         :param key:
         :param val:
@@ -261,16 +255,16 @@
         """
         res = cls.get_by_name(key)
         if res is None:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
+            # new setting
+            val = val if val is not None else ''
+            type = type if type is not None else 'unicode'
             res = cls(key, val, type)
-            Session().add(res)
+            meta.Session().add(res)
         else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
+            if val is not None:
                 # update if set
                 res.app_settings_value = val
-            if not isinstance(type, Optional):
+            if type is not None:
                 # update if set
                 res.app_settings_type = type
         return res
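# Illustrative sketch, not from the Kallithea source: the None-skipping update
# semantics of Setting.create_or_update() described above, modelled without the
# database. Passing None for val or type leaves the corresponding stored field
# untouched on update. The setting name and value are made up.
def sketch_create_or_update(store, key, val=None, type=None):
    if key not in store:
        store[key] = {'value': val if val is not None else '',
                      'type': type if type is not None else 'unicode'}
    else:
        if val is not None:
            store[key]['value'] = val
        if type is not None:
            store[key]['type'] = type
    return store[key]

settings = {}
sketch_create_or_update(settings, 'title', 'Kallithea')      # creates the setting
sketch_create_or_update(settings, 'title', type='unicode')   # value left untouched
print(settings['title'])  # {'value': 'Kallithea', 'type': 'unicode'}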
@@ -312,9 +306,10 @@
 
     @classmethod
     def get_server_info(cls):
+        import platform
+
         import pkg_resources
-        import platform
-        from kallithea.lib.utils import check_git_version
+
         mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
         info = {
             'modules': sorted(mods, key=lambda k: k[0].lower()),
@@ -327,7 +322,7 @@
         return info
 
 
-class Ui(Base, BaseDbModel):
+class Ui(meta.Base, BaseDbModel):
     __tablename__ = 'ui'
     __table_args__ = (
         Index('ui_ui_section_ui_key_idx', 'ui_section', 'ui_key'),
@@ -335,8 +330,8 @@
         _table_args_default_dict,
     )
 
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
+    HOOK_UPDATE = 'changegroup.kallithea_update'
+    HOOK_REPO_SIZE = 'changegroup.kallithea_repo_size'
 
     ui_id = Column(Integer(), primary_key=True)
     ui_section = Column(String(255), nullable=False)
@@ -355,22 +350,15 @@
         setting = cls.get_by_key(section, key)
         if setting is None:
             setting = cls(ui_section=section, ui_key=key)
-            Session().add(setting)
+            meta.Session().add(setting)
         return setting
 
     @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
-        q = q.filter(cls.ui_section == 'hooks')
-        q = q.order_by(cls.ui_section, cls.ui_key)
-        return q.all()
-
-    @classmethod
     def get_custom_hooks(cls):
         q = cls.query()
+        q = q.filter(cls.ui_section == 'hooks')
         q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
-        q = q.filter(cls.ui_section == 'hooks')
+        q = q.filter(cls.ui_active)
         q = q.order_by(cls.ui_section, cls.ui_key)
         return q.all()
 
@@ -390,7 +378,7 @@
             self.ui_section, self.ui_key, self.ui_value)
 
 
-class User(Base, BaseDbModel):
+class User(meta.Base, BaseDbModel):
     __tablename__ = 'users'
     __table_args__ = (
         Index('u_username_idx', 'username'),
@@ -597,12 +585,9 @@
     def get_from_cs_author(cls, author):
         """
         Tries to get User objects out of commit author string
-
-        :param author:
         """
-        from kallithea.lib.helpers import email, author_name
         # Valid email in the attribute passed, see if they're in the system
-        _email = email(author)
+        _email = author_email(author)
         if _email:
             user = cls.get_by_email(_email)
             if user is not None:
@@ -669,7 +654,7 @@
         return data
 
 
-class UserApiKeys(Base, BaseDbModel):
+class UserApiKeys(meta.Base, BaseDbModel):
     __tablename__ = 'user_api_keys'
     __table_args__ = (
         Index('uak_api_key_idx', 'api_key'),
@@ -691,7 +676,7 @@
         return (self.expires != -1) & (time.time() > self.expires)
 
 
-class UserEmailMap(Base, BaseDbModel):
+class UserEmailMap(meta.Base, BaseDbModel):
     __tablename__ = 'user_email_map'
     __table_args__ = (
         Index('uem_email_idx', 'email'),
@@ -706,7 +691,7 @@
     @validates('_email')
     def validate_email(self, key, email):
         # check if this email is not main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
+        main_email = meta.Session().query(User).filter(User.email == email).scalar()
         if main_email is not None:
             raise AttributeError('email %s is present is user table' % email)
         return email
@@ -720,7 +705,7 @@
         self._email = val.lower() if val else None
 
 
-class UserIpMap(Base, BaseDbModel):
+class UserIpMap(meta.Base, BaseDbModel):
     __tablename__ = 'user_ip_map'
     __table_args__ = (
         UniqueConstraint('user_id', 'ip_addr'),
@@ -748,7 +733,7 @@
         return "<%s %s: %s>" % (self.__class__.__name__, self.user_id, self.ip_addr)
 
 
-class UserLog(Base, BaseDbModel):
+class UserLog(meta.Base, BaseDbModel):
     __tablename__ = 'user_logs'
     __table_args__ = (
         _table_args_default_dict,
@@ -776,7 +761,7 @@
     repository = relationship('Repository', cascade='')
 
 
-class UserGroup(Base, BaseDbModel):
+class UserGroup(meta.Base, BaseDbModel):
     __tablename__ = 'users_groups'
     __table_args__ = (
         _table_args_default_dict,
@@ -857,7 +842,7 @@
         return data
 
 
-class UserGroupMember(Base, BaseDbModel):
+class UserGroupMember(meta.Base, BaseDbModel):
     __tablename__ = 'users_groups_members'
     __table_args__ = (
         _table_args_default_dict,
@@ -875,7 +860,7 @@
         self.user_id = u_id
 
 
-class RepositoryField(Base, BaseDbModel):
+class RepositoryField(meta.Base, BaseDbModel):
     __tablename__ = 'repositories_fields'
     __table_args__ = (
         UniqueConstraint('repository_id', 'field_key'),  # no-multi field
@@ -913,7 +898,7 @@
         return row
 
 
-class Repository(Base, BaseDbModel):
+class Repository(meta.Base, BaseDbModel):
     __tablename__ = 'repositories'
     __table_args__ = (
         Index('r_repo_name_idx', 'repo_name'),
@@ -1029,7 +1014,7 @@
         :param cls:
         :param repo_name:
         """
-        return URL_SEP.join(repo_name.split(os.sep))
+        return kallithea.URL_SEP.join(repo_name.split(os.sep))
 
     @classmethod
     def guess_instance(cls, value):
@@ -1040,9 +1025,9 @@
         """Get the repo, defaulting to database case sensitivity.
         case_insensitive will be slower and should only be specified if necessary."""
         if case_insensitive:
-            q = Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name))
+            q = meta.Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name))
         else:
-            q = Session().query(cls).filter(cls.repo_name == repo_name)
+            q = meta.Session().query(cls).filter(cls.repo_name == repo_name)
         q = q.options(joinedload(Repository.fork)) \
                 .options(joinedload(Repository.owner)) \
                 .options(joinedload(Repository.group))
@@ -1055,7 +1040,7 @@
         assert repo_full_path.startswith(base_full_path + os.path.sep)
         repo_name = repo_full_path[len(base_full_path) + 1:]
         repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
+        return cls.get_by_repo_name(repo_name.strip(kallithea.URL_SEP))
 
     @classmethod
     def get_repo_forks(cls, repo_id):
@@ -1077,7 +1062,7 @@
 
     @property
     def just_name(self):
-        return self.repo_name.split(URL_SEP)[-1]
+        return self.repo_name.split(kallithea.URL_SEP)[-1]
 
     @property
     def groups_with_parents(self):
@@ -1100,7 +1085,7 @@
         # we need to split the name by / since this is how we store the
         # names in the database, but that eventually needs to be converted
         # into a valid system path
-        p += self.repo_name.split(URL_SEP)
+        p += self.repo_name.split(kallithea.URL_SEP)
         return os.path.join(*p)
 
     def get_new_name(self, repo_name):
@@ -1110,15 +1095,7 @@
         :param group_name:
         """
         path_prefix = self.group.full_path_splitted if self.group else []
-        return URL_SEP.join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates an db based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui()
+        return kallithea.URL_SEP.join(path_prefix + [repo_name])
 
     @classmethod
     def is_valid(cls, repo_name):
@@ -1163,8 +1140,8 @@
             ))
         if with_pullrequests:
             data['pull_requests'] = repo.pull_requests_other
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
+        settings = Setting.get_app_settings()
+        repository_fields = asbool(settings.get('repository_fields'))
         if repository_fields:
             for f in self.extra_fields:
                 data[f.field_key_prefixed] = f.field_value
@@ -1179,7 +1156,6 @@
     def clone_uri_hidden(self):
         clone_uri = self.clone_uri
         if clone_uri:
-            import urlobject
             url_obj = urlobject.URLObject(self.clone_uri)
             if url_obj.password:
                 clone_uri = url_obj.with_password('*****')
@@ -1193,8 +1169,7 @@
         else:
             clone_uri_tmpl = clone_uri_tmpl.replace('_{repoid}', '{repo}')
 
-        import kallithea.lib.helpers as h
-        prefix_url = h.canonical_url('home')
+        prefix_url = webutils.canonical_url('home')
 
         return get_clone_url(clone_uri_tmpl=clone_uri_tmpl,
                              prefix_url=prefix_url,
@@ -1235,7 +1210,6 @@
 
         :param cs_cache:
         """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
         if cs_cache is None:
             cs_cache = EmptyChangeset()
             # use no-cache version here
@@ -1253,7 +1227,7 @@
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
-            Session().commit()
+            meta.Session().commit()
         else:
             log.debug('changeset_cache for %s already up to date with %s',
                       self.repo_name, cs_cache['raw_id'])
@@ -1315,9 +1289,8 @@
         return grouped
 
     def _repo_size(self):
-        from kallithea.lib import helpers as h
         log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
+        return webutils.format_byte_size(self.scm_instance.size)
 
     #==========================================================================
     # SCM CACHE INSTANCE
@@ -1342,16 +1315,9 @@
 
     def scm_instance_no_cache(self):
         repo_full_path = self.repo_full_path
-        alias = get_scm(repo_full_path)[0]
-        log.debug('Creating instance of %s repository from %s',
-                  alias, self.repo_full_path)
-        backend = get_backend(alias)
-
-        if alias == 'hg':
-            self._scm_instance = backend(repo_full_path, create=False, baseui=self._ui)
-        else:
-            self._scm_instance = backend(repo_full_path, create=False)
-
+        log.debug('Creating instance of repository at %s', repo_full_path)
+        from kallithea.lib.utils import make_ui
+        self._scm_instance = get_repo(repo_full_path, baseui=make_ui(repo_full_path))
         return self._scm_instance
 
     def __json__(self):
@@ -1362,7 +1328,7 @@
         )
 
 
-class RepoGroup(Base, BaseDbModel):
+class RepoGroup(meta.Base, BaseDbModel):
     __tablename__ = 'groups'
     __table_args__ = (
         _table_args_default_dict,
@@ -1406,11 +1372,9 @@
     @classmethod
     def _generate_choice(cls, repo_group):
         """Return tuple with group_id and name as html literal"""
-        from webhelpers2.html import literal
-        import kallithea.lib.helpers as h
         if repo_group is None:
             return (-1, '-- %s --' % _('top level'))
-        return repo_group.group_id, literal(cls.SEP.join(h.html_escape(x) for x in repo_group.full_path_splitted))
+        return repo_group.group_id, webutils.literal(cls.SEP.join(webutils.html_escape(x) for x in repo_group.full_path_splitted))
 
     @classmethod
     def groups_choices(cls, groups):
@@ -1450,7 +1414,7 @@
 
     @property
     def name(self):
-        return self.group_name.split(URL_SEP)[-1]
+        return self.group_name.split(kallithea.URL_SEP)[-1]
 
     @property
     def full_path(self):
@@ -1458,7 +1422,7 @@
 
     @property
     def full_path_splitted(self):
-        return self.group_name.split(URL_SEP)
+        return self.group_name.split(kallithea.URL_SEP)
 
     @property
     def repositories(self):
@@ -1513,7 +1477,7 @@
         """
         path_prefix = (self.parent_group.full_path_splitted if
                        self.parent_group else [])
-        return URL_SEP.join(path_prefix + [group_name])
+        return kallithea.URL_SEP.join(path_prefix + [group_name])
 
     def get_api_data(self):
         """
@@ -1532,7 +1496,7 @@
         return data
 
 
-class Permission(Base, BaseDbModel):
+class Permission(meta.Base, BaseDbModel):
     __tablename__ = 'permissions'
     __table_args__ = (
         Index('p_perm_name_idx', 'permission_name'),
@@ -1557,18 +1521,12 @@
         ('usergroup.write', _('Default user has write access to new user groups')),
         ('usergroup.admin', _('Default user has admin access to new user groups')),
 
-        ('hg.repogroup.create.false', _('Only admins can create repository groups')),
-        ('hg.repogroup.create.true', _('Non-admins can create repository groups')),
-
         ('hg.usergroup.create.false', _('Only admins can create user groups')),
         ('hg.usergroup.create.true', _('Non-admins can create user groups')),
 
         ('hg.create.none', _('Only admins can create top level repositories')),
         ('hg.create.repository', _('Non-admins can create top level repositories')),
 
-        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
-        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
-
         ('hg.fork.none', _('Only admins can fork repositories')),
         ('hg.fork.repository', _('Non-admins can fork repositories')),
 
@@ -1586,7 +1544,6 @@
         'group.read',
         'usergroup.read',
         'hg.create.repository',
-        'hg.create.write_on_repogroup.true',
         'hg.fork.repository',
         'hg.register.manual_activate',
         'hg.extern_activate.auto',
@@ -1611,9 +1568,6 @@
         'usergroup.write': 3,
         'usergroup.admin': 4,
 
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
         'hg.usergroup.create.false': 0,
         'hg.usergroup.create.true': 1,
 
@@ -1623,9 +1577,6 @@
         'hg.create.none': 0,
         'hg.create.repository': 1,
 
-        'hg.create.write_on_repogroup.false': 0,
-        'hg.create.write_on_repogroup.true': 1,
-
         'hg.register.none': 0,
         'hg.register.manual_activate': 1,
         'hg.register.auto_activate': 2,
@@ -1652,7 +1603,7 @@
 
     @classmethod
     def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm) \
+        q = meta.Session().query(UserRepoToPerm) \
          .options(joinedload(UserRepoToPerm.repository)) \
          .options(joinedload(UserRepoToPerm.permission)) \
          .filter(UserRepoToPerm.user_id == default_user_id)
@@ -1661,7 +1612,7 @@
 
     @classmethod
     def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm) \
+        q = meta.Session().query(UserRepoGroupToPerm) \
          .options(joinedload(UserRepoGroupToPerm.group)) \
          .options(joinedload(UserRepoGroupToPerm.permission)) \
          .filter(UserRepoGroupToPerm.user_id == default_user_id)
@@ -1670,7 +1621,7 @@
 
     @classmethod
     def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm) \
+        q = meta.Session().query(UserUserGroupToPerm) \
          .options(joinedload(UserUserGroupToPerm.user_group)) \
          .options(joinedload(UserUserGroupToPerm.permission)) \
          .filter(UserUserGroupToPerm.user_id == default_user_id)
@@ -1678,7 +1629,7 @@
         return q.all()
 
 
-class UserRepoToPerm(Base, BaseDbModel):
+class UserRepoToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'repo_to_perm'
     __table_args__ = (
         UniqueConstraint('user_id', 'repository_id', 'permission_id'),
@@ -1700,7 +1651,7 @@
         n.user = user
         n.repository = repository
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
     def __repr__(self):
@@ -1708,7 +1659,7 @@
             self.__class__.__name__, self.user, self.repository, self.permission)
 
 
-class UserUserGroupToPerm(Base, BaseDbModel):
+class UserUserGroupToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'user_user_group_to_perm'
     __table_args__ = (
         UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
@@ -1730,7 +1681,7 @@
         n.user = user
         n.user_group = user_group
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
     def __repr__(self):
@@ -1738,7 +1689,7 @@
             self.__class__.__name__, self.user, self.user_group, self.permission)
 
 
-class UserToPerm(Base, BaseDbModel):
+class UserToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'user_to_perm'
     __table_args__ = (
         UniqueConstraint('user_id', 'permission_id'),
@@ -1757,7 +1708,7 @@
             self.__class__.__name__, self.user, self.permission)
 
 
-class UserGroupRepoToPerm(Base, BaseDbModel):
+class UserGroupRepoToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'users_group_repo_to_perm'
     __table_args__ = (
         UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
@@ -1779,7 +1730,7 @@
         n.users_group = users_group
         n.repository = repository
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
     def __repr__(self):
@@ -1787,7 +1738,7 @@
             self.__class__.__name__, self.users_group, self.repository, self.permission)
 
 
-class UserGroupUserGroupToPerm(Base, BaseDbModel):
+class UserGroupUserGroupToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'user_group_user_group_to_perm'
     __table_args__ = (
         UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
@@ -1809,7 +1760,7 @@
         n.target_user_group = target_user_group
         n.user_group = user_group
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
     def __repr__(self):
@@ -1817,7 +1768,7 @@
             self.__class__.__name__, self.user_group, self.target_user_group, self.permission)
 
 
-class UserGroupToPerm(Base, BaseDbModel):
+class UserGroupToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'users_group_to_perm'
     __table_args__ = (
         UniqueConstraint('users_group_id', 'permission_id',),
@@ -1832,7 +1783,7 @@
     permission = relationship('Permission')
 
 
-class UserRepoGroupToPerm(Base, BaseDbModel):
+class UserRepoGroupToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'user_repo_group_to_perm'
     __table_args__ = (
         UniqueConstraint('user_id', 'group_id', 'permission_id'),
@@ -1854,11 +1805,11 @@
         n.user = user
         n.group = repository_group
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
 
-class UserGroupRepoGroupToPerm(Base, BaseDbModel):
+class UserGroupRepoGroupToPerm(meta.Base, BaseDbModel):
     __tablename__ = 'users_group_repo_group_to_perm'
     __table_args__ = (
         UniqueConstraint('users_group_id', 'group_id'),
@@ -1880,11 +1831,11 @@
         n.users_group = user_group
         n.group = repository_group
         n.permission = permission
-        Session().add(n)
+        meta.Session().add(n)
         return n
 
 
-class Statistics(Base, BaseDbModel):
+class Statistics(meta.Base, BaseDbModel):
     __tablename__ = 'statistics'
     __table_args__ = (
          _table_args_default_dict,
@@ -1900,7 +1851,7 @@
     repository = relationship('Repository', single_parent=True)
 
 
-class UserFollowing(Base, BaseDbModel):
+class UserFollowing(meta.Base, BaseDbModel):
     __tablename__ = 'user_followings'
     __table_args__ = (
         UniqueConstraint('user_id', 'follows_repository_id', name='uq_user_followings_user_repo'),
@@ -1924,7 +1875,7 @@
         return cls.query().filter(cls.follows_repository_id == repo_id)
 
 
-class ChangesetComment(Base, BaseDbModel):
+class ChangesetComment(meta.Base, BaseDbModel):
     __tablename__ = 'changeset_comments'
     __table_args__ = (
         Index('cc_revision_idx', 'revision'),
@@ -1954,9 +1905,8 @@
 
     def url(self):
         anchor = "comment-%s" % self.comment_id
-        import kallithea.lib.helpers as h
         if self.revision:
-            return h.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
+            return webutils.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
         elif self.pull_request_id is not None:
             return self.pull_request.url(anchor=anchor)
 
@@ -1971,7 +1921,7 @@
         return self.created_on > datetime.datetime.now() - datetime.timedelta(minutes=5)
 
 
-class ChangesetStatus(Base, BaseDbModel):
+class ChangesetStatus(meta.Base, BaseDbModel):
     __tablename__ = 'changeset_statuses'
     __table_args__ = (
         Index('cs_revision_idx', 'revision'),
@@ -2020,7 +1970,7 @@
 
     @classmethod
     def get_status_lbl(cls, value):
-        return cls.STATUSES_DICT.get(value)
+        return str(cls.STATUSES_DICT.get(value))  # using str to evaluate translated LazyString at runtime
 
     @property
     def status_lbl(self):
@@ -2034,7 +1984,7 @@
             )
 
 
-class PullRequest(Base, BaseDbModel):
+class PullRequest(meta.Base, BaseDbModel):
     __tablename__ = 'pull_requests'
     __table_args__ = (
         Index('pr_org_repo_id_idx', 'org_repo_id'),
@@ -2156,11 +2106,12 @@
             status=self.status,
             comments=self.comments,
             statuses=self.statuses,
+            created_on=self.created_on.replace(microsecond=0),
+            updated_on=self.updated_on.replace(microsecond=0),
         )
 
     def url(self, **kwargs):
         canonical = kwargs.pop('canonical', None)
-        import kallithea.lib.helpers as h
         b = self.org_ref_parts[1]
         if b != self.other_ref_parts[1]:
             s = '/_/' + b
@@ -2168,16 +2119,17 @@
             s = '/_/' + self.title
         kwargs['extra'] = urlreadable(s)
         if canonical:
-            return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
+            return webutils.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
                                    pull_request_id=self.pull_request_id, **kwargs)
-        return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
+        return webutils.url('pullrequest_show', repo_name=self.other_repo.repo_name,
                      pull_request_id=self.pull_request_id, **kwargs)
 
 
-class PullRequestReviewer(Base, BaseDbModel):
+class PullRequestReviewer(meta.Base, BaseDbModel):
     __tablename__ = 'pull_request_reviewers'
     __table_args__ = (
         Index('pull_request_reviewers_user_id_idx', 'user_id'),
+        UniqueConstraint('pull_request_id', 'user_id'),
         _table_args_default_dict,
     )
 
@@ -2205,7 +2157,7 @@
     __tablename__ = 'user_to_notification'
 
 
-class Gist(Base, BaseDbModel):
+class Gist(meta.Base, BaseDbModel):
     __tablename__ = 'gists'
     __table_args__ = (
         Index('g_gist_access_id_idx', 'gist_access_id'),
@@ -2213,6 +2165,9 @@
         _table_args_default_dict,
     )
 
+    GIST_STORE_LOC = '.rc_gist_store'
+    GIST_METADATA_FILE = '.rc_gist_metadata'
+
     GIST_PUBLIC = 'public'
     GIST_PRIVATE = 'private'
     DEFAULT_FILENAME = 'gistfile1.txt'
@@ -2257,8 +2212,7 @@
         if alias_url:
             return alias_url.replace('{gistid}', self.gist_access_id)
 
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
+        return webutils.canonical_url('gist', gist_id=self.gist_access_id)
 
     def get_api_data(self):
         """
@@ -2286,13 +2240,13 @@
 
     @property
     def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        from kallithea.model.gist import GIST_STORE_LOC
-        gist_base_path = os.path.join(kallithea.CONFIG['base_path'], GIST_STORE_LOC)
-        return get_repo(os.path.join(gist_base_path, self.gist_access_id))
+        gist_base_path = os.path.join(kallithea.CONFIG['base_path'], self.GIST_STORE_LOC)
+        repo_full_path = os.path.join(gist_base_path, self.gist_access_id)
+        from kallithea.lib.utils import make_ui
+        return get_repo(repo_full_path, baseui=make_ui(repo_full_path))
 
 
-class UserSshKeys(Base, BaseDbModel):
+class UserSshKeys(meta.Base, BaseDbModel):
     __tablename__ = 'user_ssh_keys'
     __table_args__ = (
         Index('usk_fingerprint_idx', 'fingerprint'),
@@ -2316,8 +2270,12 @@
 
     @public_key.setter
     def public_key(self, full_key):
-        # the full public key is too long to be suitable as database key - instead,
-        # use fingerprints similar to 'ssh-keygen -E sha256 -lf ~/.ssh/id_rsa.pub'
+        """The full public key is too long to be suitable as database key.
+        Instead, as a side-effect of setting the public key string, compute the
+        fingerprints according to https://tools.ietf.org/html/rfc4716#section-4
+        BUT using sha256 instead of md5, similar to 'ssh-keygen -E sha256 -lf
+        ~/.ssh/id_rsa.pub' .
+        """
+        keytype, key_bytes, comment = ssh.parse_pub_key(full_key)
         self._public_key = full_key
-        enc_key = safe_bytes(full_key.split(" ")[1])
-        self.fingerprint = base64.b64encode(hashlib.sha256(base64.b64decode(enc_key)).digest()).replace(b'\n', b'').rstrip(b'=').decode()
+        self.fingerprint = base64.b64encode(hashlib.sha256(key_bytes).digest()).replace(b'\n', b'').rstrip(b'=').decode()
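
Note on the UserSshKeys.public_key change above: the stored fingerprint is now the unpadded base64 of the SHA-256 digest of the decoded key blob. A minimal standalone sketch of that computation, using a plain split() where the real code relies on kallithea.lib.ssh.parse_pub_key() for parsing and validation:

    import base64
    import hashlib

    def sha256_fingerprint(public_key_line):
        """Fingerprint for a public key line like 'ssh-ed25519 AAAA... user@host'."""
        key_bytes = base64.b64decode(public_key_line.split()[1])
        digest = hashlib.sha256(key_bytes).digest()
        # unpadded base64 of the digest; 'ssh-keygen -E sha256 -lf' shows the
        # same value with a 'SHA256:' prefix
        return base64.b64encode(digest).rstrip(b'=').decode()
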
--- a/kallithea/model/forms.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/forms.py	Thu May 27 21:27:37 2021 +0200
@@ -39,7 +39,7 @@
 from formencode import All
 from tg.i18n import ugettext as _
 
-from kallithea import BACKENDS
+import kallithea
 from kallithea.model import validators as v
 
 
@@ -238,7 +238,7 @@
     return _PasswordResetConfirmationForm
 
 
-def RepoForm(edit=False, old_data=None, supported_backends=BACKENDS,
+def RepoForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS,
              repo_groups=None, landing_revs=None):
     old_data = old_data or {}
     repo_groups = repo_groups or []
@@ -315,7 +315,7 @@
     return _RepoFieldForm
 
 
-def RepoForkForm(edit=False, old_data=None, supported_backends=BACKENDS,
+def RepoForkForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS,
                  repo_groups=None, landing_revs=None):
     old_data = old_data or {}
     repo_groups = repo_groups or []
@@ -384,11 +384,10 @@
             v.ValidPath(),
             v.UnicodeString(strip=True, min=1, not_empty=True)
         )
-        hooks_changegroup_update = v.StringBoolean(if_missing=False)
-        hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
+        hooks_changegroup_kallithea_update = v.StringBoolean(if_missing=False)
+        hooks_changegroup_kallithea_repo_size = v.StringBoolean(if_missing=False)
 
         extensions_largefiles = v.StringBoolean(if_missing=False)
-        extensions_hgsubversion = v.StringBoolean(if_missing=False)
         extensions_hggit = v.StringBoolean(if_missing=False)
 
     return _ApplicationUiSettingsForm
@@ -396,7 +395,6 @@
 
 def DefaultPermissionsForm(repo_perms_choices, group_perms_choices,
                            user_group_perms_choices, create_choices,
-                           create_on_write_choices, repo_group_create_choices,
                            user_group_create_choices, fork_choices,
                            register_choices, extern_activate_choices):
     class _DefaultPermissionsForm(formencode.Schema):
@@ -411,9 +409,7 @@
         default_user_group_perm = v.OneOf(user_group_perms_choices)
 
         default_repo_create = v.OneOf(create_choices)
-        create_on_write = v.OneOf(create_on_write_choices)
         default_user_group_create = v.OneOf(user_group_create_choices)
-        #default_repo_group_create = v.OneOf(repo_group_create_choices) #not impl. yet
         default_fork = v.OneOf(fork_choices)
 
         default_register = v.OneOf(register_choices)
@@ -435,7 +431,7 @@
     return _CustomDefaultPermissionsForm
 
 
-def DefaultsForm(edit=False, old_data=None, supported_backends=BACKENDS):
+def DefaultsForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS):
     class _DefaultsForm(formencode.Schema):
         allow_extra_fields = True
         filter_extra_fields = True
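
The hook checkbox fields renamed earlier in this file's diff follow the new built-in hook keys ('changegroup.kallithea_update' and 'changegroup.kallithea_repo_size'). As a rough illustration of how such boolean fields validate, a stand-in schema using plain formencode (not the actual _ApplicationUiSettingsForm, which also uses Kallithea's own validators):

    import formencode
    from formencode import validators

    class _HookFlagsForm(formencode.Schema):
        # illustrative stand-in for the renamed fields above
        allow_extra_fields = True
        filter_extra_fields = True
        hooks_changegroup_kallithea_update = validators.StringBool(if_missing=False)
        hooks_changegroup_kallithea_repo_size = validators.StringBool(if_missing=False)

    result = _HookFlagsForm().to_python({'hooks_changegroup_kallithea_update': 'true'})
    assert result['hooks_changegroup_kallithea_update'] is True
    assert result['hooks_changegroup_kallithea_repo_size'] is False
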
--- a/kallithea/model/gist.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/gist.py	Thu May 27 21:27:37 2021 +0200
@@ -34,16 +34,11 @@
 
 from kallithea.lib import ext_json
 from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime
-from kallithea.model.db import Gist, Session, User
-from kallithea.model.repo import RepoModel
-from kallithea.model.scm import ScmModel
+from kallithea.model import db, meta, repo, scm
 
 
 log = logging.getLogger(__name__)
 
-GIST_STORE_LOC = '.rc_gist_store'
-GIST_METADATA_FILE = '.rc_gist_metadata'
-
 
 def make_gist_access_id():
     """Generate a random, URL safe, almost certainly unique gist identifier."""
@@ -61,12 +56,12 @@
 
         :param gist: gist object
         """
-        root_path = RepoModel().repos_path
-        rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
+        root_path = repo.RepoModel().repos_path
+        rm_path = os.path.join(root_path, db.Gist.GIST_STORE_LOC, gist.gist_access_id)
         log.info("Removing %s", rm_path)
         shutil.rmtree(rm_path)
 
-    def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
+    def _store_metadata(self, fs_repo, gist_id, gist_access_id, user_id, gist_type,
                         gist_expires):
         """
         store metadata inside the gist, this can be later used for imports
@@ -81,11 +76,11 @@
             'gist_expires': gist_expires,
             'gist_updated': time.time(),
         }
-        with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
+        with open(os.path.join(fs_repo.path, '.hg', db.Gist.GIST_METADATA_FILE), 'wb') as f:
             f.write(ascii_bytes(ext_json.dumps(metadata)))
 
     def get_gist(self, gist):
-        return Gist.guess_instance(gist)
+        return db.Gist.guess_instance(gist)
 
     def get_gist_files(self, gist_access_id, revision=None):
         """
@@ -93,12 +88,12 @@
 
         :param gist_access_id:
         """
-        repo = Gist.get_by_access_id(gist_access_id)
-        cs = repo.scm_instance.get_changeset(revision)
+        gist_repo = db.Gist.get_by_access_id(gist_access_id)
+        cs = gist_repo.scm_instance.get_changeset(revision)
         return cs, [n for n in cs.get_node('/')]
 
     def create(self, description, owner, ip_addr, gist_mapping,
-               gist_type=Gist.GIST_PUBLIC, lifetime=-1):
+               gist_type=db.Gist.GIST_PUBLIC, lifetime=-1):
         """
 
         :param description: description of the gist
@@ -107,7 +102,7 @@
         :param gist_type: type of gist private/public
         :param lifetime: in minutes, -1 == forever
         """
-        owner = User.guess_instance(owner)
+        owner = db.User.guess_instance(owner)
         gist_access_id = make_gist_access_id()
         lifetime = safe_int(lifetime, -1)
         gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
@@ -115,21 +110,21 @@
                   time_to_datetime(gist_expires)
                    if gist_expires != -1 else 'forever')
         # create the Database version
-        gist = Gist()
+        gist = db.Gist()
         gist.gist_description = description
         gist.gist_access_id = gist_access_id
         gist.owner_id = owner.user_id
         gist.gist_expires = gist_expires
         gist.gist_type = gist_type
-        Session().add(gist)
-        Session().flush() # make database assign gist.gist_id
-        if gist_type == Gist.GIST_PUBLIC:
+        meta.Session().add(gist)
+        meta.Session().flush() # make database assign gist.gist_id
+        if gist_type == db.Gist.GIST_PUBLIC:
             # use DB ID for easy to use GIST ID
             gist.gist_access_id = str(gist.gist_id)
 
         log.debug('Creating new %s GIST repo %s', gist_type, gist.gist_access_id)
-        repo = RepoModel()._create_filesystem_repo(
-            repo_name=gist.gist_access_id, repo_type='hg', repo_group=GIST_STORE_LOC)
+        fs_repo = repo.RepoModel()._create_filesystem_repo(
+            repo_name=gist.gist_access_id, repo_type='hg', repo_group=db.Gist.GIST_STORE_LOC)
 
         processed_mapping = {}
         for filename in gist_mapping:
@@ -153,10 +148,10 @@
 
         # fake Kallithea Repository object
         fake_repo = AttributeDict(dict(
-            repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
-            scm_instance_no_cache=lambda: repo,
+            repo_name=os.path.join(db.Gist.GIST_STORE_LOC, gist.gist_access_id),
+            scm_instance_no_cache=lambda: fs_repo,
         ))
-        ScmModel().create_nodes(
+        scm.ScmModel().create_nodes(
             user=owner.user_id,
             ip_addr=ip_addr,
             repo=fake_repo,
@@ -165,14 +160,14 @@
             trigger_push_hook=False
         )
 
-        self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
+        self._store_metadata(fs_repo, gist.gist_id, gist.gist_access_id,
                              owner.user_id, gist.gist_type, gist.gist_expires)
         return gist
 
     def delete(self, gist, fs_remove=True):
-        gist = Gist.guess_instance(gist)
+        gist = db.Gist.guess_instance(gist)
         try:
-            Session().delete(gist)
+            meta.Session().delete(gist)
             if fs_remove:
                 self.__delete_gist(gist)
             else:
@@ -183,7 +178,7 @@
 
     def update(self, gist, description, owner, ip_addr, gist_mapping, gist_type,
                lifetime):
-        gist = Gist.guess_instance(gist)
+        gist = db.Gist.guess_instance(gist)
         gist_repo = gist.scm_instance
 
         lifetime = safe_int(lifetime, -1)
@@ -217,14 +212,14 @@
 
         # fake Kallithea Repository object
         fake_repo = AttributeDict(dict(
-            repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
+            repo_name=os.path.join(db.Gist.GIST_STORE_LOC, gist.gist_access_id),
             scm_instance_no_cache=lambda: gist_repo,
         ))
 
         self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
                              owner.user_id, gist.gist_type, gist.gist_expires)
 
-        ScmModel().update_nodes(
+        scm.ScmModel().update_nodes(
             user=owner.user_id,
             ip_addr=ip_addr,
             repo=fake_repo,
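
With GIST_STORE_LOC and GIST_METADATA_FILE now living on db.Gist, a gist's backing repository sits under <base_path>/.rc_gist_store/<gist_access_id>, and _store_metadata() writes a JSON file into that repository's .hg directory. A minimal sketch of reading it back; the helper name is hypothetical and the stdlib json module stands in for kallithea.lib.ext_json:

    import json
    import os

    GIST_STORE_LOC = '.rc_gist_store'         # db.Gist.GIST_STORE_LOC
    GIST_METADATA_FILE = '.rc_gist_metadata'  # db.Gist.GIST_METADATA_FILE

    def read_gist_metadata(base_path, gist_access_id):
        """Load the metadata dict written by GistModel._store_metadata()."""
        meta_path = os.path.join(base_path, GIST_STORE_LOC, gist_access_id,
                                 '.hg', GIST_METADATA_FILE)
        with open(meta_path, 'rb') as f:
            return json.loads(f.read())
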
--- a/kallithea/model/notification.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/notification.py	Thu May 27 21:27:37 2021 +0200
@@ -27,14 +27,21 @@
 """
 
 import datetime
+import email.message
+import email.utils
 import logging
+import smtplib
+import time
+import traceback
 
-from tg import app_globals
+from tg import app_globals, config
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 
-from kallithea.lib import helpers as h
-from kallithea.model.db import User
+from kallithea.lib import celerylib, webutils
+from kallithea.lib.utils2 import asbool
+from kallithea.lib.vcs.utils import author_email
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -49,7 +56,7 @@
     TYPE_PULL_REQUEST = 'pull_request'
     TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
 
-    def create(self, created_by, subject, body, recipients=None,
+    def create(self, created_by, body, recipients=None,
                type_=TYPE_MESSAGE, with_email=True,
                email_kwargs=None, repo_name=None):
         """
@@ -58,7 +65,6 @@
 
         :param created_by: int, str or User instance. User who created this
             notification
-        :param subject:
         :param body:
         :param recipients: list of int, str or User objects, when None
             is given send to all admins
@@ -66,17 +72,16 @@
         :param with_email: send email with this notification
         :param email_kwargs: additional dict to pass as args to email template
         """
-        from kallithea.lib.celerylib import tasks
         email_kwargs = email_kwargs or {}
         if recipients and not getattr(recipients, '__iter__', False):
             raise Exception('recipients must be a list or iterable')
 
-        created_by_obj = User.guess_instance(created_by)
+        created_by_obj = db.User.guess_instance(created_by)
 
         recipients_objs = set()
         if recipients:
             for u in recipients:
-                obj = User.guess_instance(u)
+                obj = db.User.guess_instance(u)
                 if obj is not None:
                     recipients_objs.add(obj)
                 else:
@@ -87,7 +92,7 @@
             )
         elif recipients is None:
             # empty recipients means to all admins
-            recipients_objs = User.query().filter(User.admin == True).all()
+            recipients_objs = db.User.query().filter(db.User.admin == True).all()
             log.debug('sending notifications %s to admins: %s',
                 type_, recipients_objs
             )
@@ -102,16 +107,14 @@
             headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading'])
 
         # this is passed into template
-        created_on = h.fmt_date(datetime.datetime.now())
+        created_on = webutils.fmt_date(datetime.datetime.now())
         html_kwargs = {
-                  'subject': subject,
-                  'body': h.render_w_mentions(body, repo_name),
+                  'body': None if body is None else webutils.render_w_mentions(body, repo_name),
                   'when': created_on,
                   'user': created_by_obj.username,
                   }
 
         txt_kwargs = {
-                  'subject': subject,
                   'body': body,
                   'when': created_on,
                   'user': created_by_obj.username,
@@ -126,12 +129,18 @@
         email_html_body = EmailNotificationModel() \
                             .get_email_tmpl(type_, 'html', **html_kwargs)
 
-        # don't send email to person who created this comment
-        rec_objs = set(recipients_objs).difference(set([created_by_obj]))
+        # don't send email to the person who caused the notification, except for
+        # notifications about new pull requests where the author is explicitly
+        # added.
+        rec_mails = set(obj.email for obj in recipients_objs)
+        if type_ == NotificationModel.TYPE_PULL_REQUEST:
+            rec_mails.add(created_by_obj.email)
+        else:
+            rec_mails.discard(created_by_obj.email)
 
-        # send email with notification to all other participants
-        for rec in rec_objs:
-            tasks.send_email([rec.email], email_subject, email_txt_body,
+        # send email with notification to participants
+        for rec_mail in sorted(rec_mails):
+            send_email([rec_mail], email_subject, email_txt_body,
                      email_html_body, headers,
                      from_name=created_by_obj.full_name_or_username)
 
@@ -159,7 +168,7 @@
             self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment',
         }
         self._subj_map = {
-            self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s'),
+            self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'),
             self.TYPE_MESSAGE: 'Test Message',
             # self.TYPE_PASSWORD_RESET
             self.TYPE_REGISTRATION: _('New user %(new_username)s registered'),
@@ -183,7 +192,7 @@
         bracket_tags = []
         status_change = kwargs.get('status_change')
         if status_change:
-            bracket_tags.append(str(status_change))  # apply str to evaluate LazyString before .join
+            bracket_tags.append(status_change)
         if kwargs.get('closing_pr'):
             bracket_tags.append(_('Closing'))
         if bracket_tags:
@@ -197,12 +206,11 @@
         """
         return generated template for email based on given type
         """
-
-        base = 'email_templates/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type
+        base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type
         email_template = self._tmpl_lookup.get_template(base)
         # translator and helpers inject
         _kwargs = {'_': _,
-                   'h': h,
+                   'webutils': webutils,
                    'c': c}
         _kwargs.update(kwargs)
         if content_type == 'html':
@@ -227,3 +235,117 @@
 
         log.debug('rendering tmpl %s with kwargs %s', base, _kwargs)
         return email_template.render_unicode(**_kwargs)
+
+
+@celerylib.task
+def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
+    """
+    Sends an email with defined parameters from the .ini files.
+
+    :param recipients: list of recipients; if None, the mail is sent to all
+        admins and to the address defined in the 'email_to' setting instead
+    :param subject: subject of the mail
+    :param body: plain text body of the mail
+    :param html_body: html version of body
+    :param headers: dictionary of prepopulated e-mail headers
+    :param from_name: full name to be used as sender of this mail - often a
+    .full_name_or_username value
+    """
+    assert isinstance(recipients, list), recipients
+    if headers is None:
+        headers = {}
+    else:
+        # do not modify the original headers object passed by the caller
+        headers = headers.copy()
+
+    email_config = config
+    email_prefix = email_config.get('email_prefix', '')
+    if email_prefix:
+        subject = "%s %s" % (email_prefix, subject)
+
+    if not recipients:
+        # if recipients are not defined we send to email_config + all admins
+        recipients = [u.email for u in db.User.query()
+                      .filter(db.User.admin == True).all()]
+        if email_config.get('email_to') is not None:
+            recipients += email_config.get('email_to').split(',')
+
+        # If there are still no recipients, there are no admins and no address
+        # configured in email_to, so return.
+        if not recipients:
+            log.error("No recipients specified and no fallback available.")
+            return
+
+        log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
+
+    # SMTP sender
+    app_email_from = email_config.get('app_email_from', 'Kallithea')
+    # 'From' header
+    if from_name is not None:
+        # set From header based on from_name but with a generic e-mail address
+        # In case app_email_from is in "Some Name <e-mail>" format, we first
+        # extract the e-mail address.
+        envelope_addr = author_email(app_email_from)
+        headers['From'] = '"%s" <%s>' % (
+            email.utils.quote('%s (no-reply)' % from_name),
+            envelope_addr)
+
+    smtp_server = email_config.get('smtp_server')
+    smtp_port = email_config.get('smtp_port')
+    smtp_use_tls = asbool(email_config.get('smtp_use_tls'))
+    smtp_use_ssl = asbool(email_config.get('smtp_use_ssl'))
+    smtp_auth = email_config.get('smtp_auth')  # undocumented - overrule automatic choice of auth mechanism
+    smtp_username = email_config.get('smtp_username')
+    smtp_password = email_config.get('smtp_password')
+
+    logmsg = ("Mail details:\n"
+              "recipients: %s\n"
+              "headers: %s\n"
+              "subject: %s\n"
+              "body:\n%s\n"
+              "html:\n%s\n"
+              % (' '.join(recipients), headers, subject, body, html_body))
+
+    if smtp_server:
+        log.debug("Sending e-mail. " + logmsg)
+    else:
+        log.error("SMTP mail server not configured - cannot send e-mail.")
+        log.warning(logmsg)
+        return
+
+    msg = email.message.EmailMessage()
+    msg['Subject'] = subject
+    msg['From'] = app_email_from  # fallback - might be overridden by a header
+    msg['To'] = ', '.join(recipients)
+    msg['Date'] = email.utils.formatdate(time.time())
+
+    for key, value in headers.items():
+        del msg[key]  # Delete key first to make sure add_header will replace header (if any), no matter the casing
+        msg.add_header(key, value)
+
+    msg.set_content(body)
+    msg.add_alternative(html_body, subtype='html')
+
+    try:
+        if smtp_use_ssl:
+            smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port)
+        else:
+            smtp_serv = smtplib.SMTP(smtp_server, smtp_port)
+
+        if smtp_use_tls:
+            smtp_serv.starttls()
+
+        if smtp_auth:
+            smtp_serv.ehlo()  # populate esmtp_features
+            smtp_serv.esmtp_features["auth"] = smtp_auth
+
+        if smtp_username and smtp_password is not None:
+            smtp_serv.login(smtp_username, smtp_password)
+
+        smtp_serv.sendmail(app_email_from, recipients, msg.as_string())
+        smtp_serv.quit()
+
+        log.info('Mail was sent to: %s' % recipients)
+    except:
+        log.error('Mail sending failed')
+        log.error(traceback.format_exc())
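
The send_email task added above builds a multipart message with email.message.EmailMessage and hands it to smtplib, with server, TLS/SSL and authentication options taken from the .ini configuration. A stripped-down standalone sketch of the same flow, with the configuration lookup, TLS/SSL switches and auth handling left out and all parameters passed in directly:

    import email.message
    import email.utils
    import smtplib
    import time

    def send_plain_and_html(smtp_server, smtp_port, sender, recipients,
                            subject, body, html_body):
        """Send a text/plain body with a text/html alternative over plain SMTP."""
        msg = email.message.EmailMessage()
        msg['Subject'] = subject
        msg['From'] = sender
        msg['To'] = ', '.join(recipients)
        msg['Date'] = email.utils.formatdate(time.time())
        msg.set_content(body)                           # text/plain part
        msg.add_alternative(html_body, subtype='html')  # text/html alternative
        with smtplib.SMTP(smtp_server, smtp_port) as smtp_serv:
            smtp_serv.sendmail(sender, recipients, msg.as_string())
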
--- a/kallithea/model/permission.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/permission.py	Thu May 27 21:27:37 2021 +0200
@@ -31,8 +31,8 @@
 
 from sqlalchemy.exc import DatabaseError
 
-from kallithea.lib.utils2 import str2bool
-from kallithea.model.db import Permission, Session, User, UserRepoGroupToPerm, UserRepoToPerm, UserToPerm, UserUserGroupToPerm
+from kallithea.lib.utils2 import asbool
+from kallithea.model import db, meta
 
 
 log = logging.getLogger(__name__)
@@ -47,11 +47,11 @@
         """
         Create permissions for whole system
         """
-        for p in Permission.PERMS:
-            if not Permission.get_by_key(p[0]):
-                new_perm = Permission()
+        for p in db.Permission.PERMS:
+            if not db.Permission.get_by_key(p[0]):
+                new_perm = db.Permission()
                 new_perm.permission_name = p[0]
-                Session().add(new_perm)
+                meta.Session().add(new_perm)
 
     def create_default_permissions(self, user, force=False):
         """
@@ -61,80 +61,78 @@
 
         :param user:
         """
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
         def _make_perm(perm):
-            new_perm = UserToPerm()
+            new_perm = db.UserToPerm()
             new_perm.user = user
-            new_perm.permission = Permission.get_by_key(perm)
+            new_perm.permission = db.Permission.get_by_key(perm)
             return new_perm
 
         def _get_group(perm_name):
             return '.'.join(perm_name.split('.')[:1])
 
-        perms = UserToPerm.query().filter(UserToPerm.user == user).all()
+        perms = db.UserToPerm.query().filter(db.UserToPerm.user == user).all()
         defined_perms_groups = set(_get_group(x.permission.permission_name) for x in perms)
         log.debug('GOT ALREADY DEFINED:%s', perms)
 
         if force:
             for perm in perms:
-                Session().delete(perm)
-            Session().commit()
+                meta.Session().delete(perm)
+            meta.Session().commit()
             defined_perms_groups = []
         # For every default permission that needs to be created, we check if
         # its group is already defined. If it's not, we create default permission.
-        for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
+        for perm_name in db.Permission.DEFAULT_USER_PERMISSIONS:
             gr = _get_group(perm_name)
             if gr not in defined_perms_groups:
                 log.debug('GR:%s not found, creating permission %s',
                           gr, perm_name)
                 new_perm = _make_perm(perm_name)
-                Session().add(new_perm)
+                meta.Session().add(new_perm)
 
     def update(self, form_result):
-        perm_user = User.get_by_username(username=form_result['perm_user_name'])
+        perm_user = db.User.get_by_username(username=form_result['perm_user_name'])
 
         try:
             # stage 1 set anonymous access
             if perm_user.is_default_user:
-                perm_user.active = str2bool(form_result['anonymous'])
+                perm_user.active = asbool(form_result['anonymous'])
 
             # stage 2 reset defaults and set them from form data
             def _make_new(usr, perm_name):
                 log.debug('Creating new permission:%s', perm_name)
-                new = UserToPerm()
+                new = db.UserToPerm()
                 new.user = usr
-                new.permission = Permission.get_by_key(perm_name)
+                new.permission = db.Permission.get_by_key(perm_name)
                 return new
             # clear current entries, to make this function idempotent
             # it will fix even if we define more permissions or permissions
             # are somehow missing
-            u2p = UserToPerm.query() \
-                .filter(UserToPerm.user == perm_user) \
+            u2p = db.UserToPerm.query() \
+                .filter(db.UserToPerm.user == perm_user) \
                 .all()
             for p in u2p:
-                Session().delete(p)
+                meta.Session().delete(p)
             # create fresh set of permissions
             for def_perm_key in ['default_repo_perm',
                                  'default_group_perm',
                                  'default_user_group_perm',
                                  'default_repo_create',
-                                 'create_on_write', # special case for create repos on write access to group
-                                 #'default_repo_group_create', # not implemented yet
                                  'default_user_group_create',
                                  'default_fork',
                                  'default_register',
                                  'default_extern_activate']:
                 p = _make_new(perm_user, form_result[def_perm_key])
-                Session().add(p)
+                meta.Session().add(p)
 
             # stage 3 update all default permissions for repos if checked
             if form_result['overwrite_default_repo']:
                 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
-                _def = Permission.get_by_key('repository.' + _def_name)
+                _def = db.Permission.get_by_key('repository.' + _def_name)
                 # repos
-                for r2p in UserRepoToPerm.query() \
-                               .filter(UserRepoToPerm.user == perm_user) \
+                for r2p in db.UserRepoToPerm.query() \
+                               .filter(db.UserRepoToPerm.user == perm_user) \
                                .all():
 
                     # don't reset PRIVATE repositories
@@ -144,23 +142,23 @@
             if form_result['overwrite_default_group']:
                 _def_name = form_result['default_group_perm'].split('group.')[-1]
                 # groups
-                _def = Permission.get_by_key('group.' + _def_name)
-                for g2p in UserRepoGroupToPerm.query() \
-                               .filter(UserRepoGroupToPerm.user == perm_user) \
+                _def = db.Permission.get_by_key('group.' + _def_name)
+                for g2p in db.UserRepoGroupToPerm.query() \
+                               .filter(db.UserRepoGroupToPerm.user == perm_user) \
                                .all():
                     g2p.permission = _def
 
             if form_result['overwrite_default_user_group']:
                 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
                 # groups
-                _def = Permission.get_by_key('usergroup.' + _def_name)
-                for g2p in UserUserGroupToPerm.query() \
-                               .filter(UserUserGroupToPerm.user == perm_user) \
+                _def = db.Permission.get_by_key('usergroup.' + _def_name)
+                for g2p in db.UserUserGroupToPerm.query() \
+                               .filter(db.UserUserGroupToPerm.user == perm_user) \
                                .all():
                     g2p.permission = _def
 
-            Session().commit()
+            meta.Session().commit()
         except (DatabaseError,):
             log.error(traceback.format_exc())
-            Session().rollback()
+            meta.Session().rollback()
             raise
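
str2bool has been replaced by asbool here and in db.py above. The real helper lives in kallithea.lib.utils2; a hedged stand-in, assuming it follows the usual paste.deploy-style asbool convention:

    def asbool_sketch(obj):
        """Illustrative stand-in for kallithea.lib.utils2.asbool (semantics assumed)."""
        if isinstance(obj, str):
            obj = obj.strip().lower()
            if obj in ('true', 'yes', 'on', 'y', 't', '1'):
                return True
            if obj in ('false', 'no', 'off', 'n', 'f', '0'):
                return False
            raise ValueError('String is not true/false: %r' % obj)
        return bool(obj)
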
--- a/kallithea/model/pull_request.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/pull_request.py	Thu May 27 21:27:37 2021 +0200
@@ -32,11 +32,10 @@
 from tg import request
 from tg.i18n import ugettext as _
 
-from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import ascii_bytes, extract_mentioned_users
-from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, User
-from kallithea.model.meta import Session
-from kallithea.model.notification import NotificationModel
+from kallithea.lib import auth, hooks, webutils
+from kallithea.lib.utils import extract_mentioned_users
+from kallithea.lib.utils2 import ascii_bytes, short_ref_name
+from kallithea.model import changeset_status, comment, db, meta, notification
 
 
 log = logging.getLogger(__name__)
@@ -58,22 +57,27 @@
             mention_recipients = set(mention_recipients) - reviewers
             _assert_valid_reviewers(mention_recipients)
 
-        # members
+        redundant_reviewers = set(db.User.query() \
+            .join(db.PullRequestReviewer) \
+            .filter(db.PullRequestReviewer.pull_request == pr) \
+            .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers))
+            .all())
+
+        if redundant_reviewers:
+            log.debug('Following reviewers were already part of pull request %s: %s', pr.pull_request_id, redundant_reviewers)
+
+            reviewers -= redundant_reviewers
+
+        log.debug('Adding reviewers to pull request %s: %s', pr.pull_request_id, reviewers)
         for reviewer in reviewers:
-            prr = PullRequestReviewer(reviewer, pr)
-            Session().add(prr)
+            prr = db.PullRequestReviewer(reviewer, pr)
+            meta.Session().add(prr)
 
         # notification to reviewers
         pr_url = pr.url(canonical=True)
         threading = ['%s-pr-%s@%s' % (pr.other_repo.repo_name,
                                       pr.pull_request_id,
-                                      h.canonical_hostname())]
-        subject = h.link_to(
-            _('%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s') %
-                {'user': user.username,
-                 'pr_title': pr.title,
-                 'pr_nice_id': pr.nice_id()},
-            pr_url)
+                                      webutils.canonical_hostname())]
         body = pr.description
         _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':')
         _other_ref_type, other_ref_name, _other_rev = pr.other_ref.split(':')
@@ -81,18 +85,18 @@
                          for x in map(pr.org_repo.get_changeset, pr.revisions)]
         email_kwargs = {
             'pr_title': pr.title,
-            'pr_title_short': h.shorter(pr.title, 50),
+            'pr_title_short': webutils.shorter(pr.title, 50),
             'pr_user_created': user.full_name_and_username,
-            'pr_repo_url': h.canonical_url('summary_home', repo_name=pr.other_repo.repo_name),
+            'pr_repo_url': webutils.canonical_url('summary_home', repo_name=pr.other_repo.repo_name),
             'pr_url': pr_url,
             'pr_revisions': revision_data,
             'repo_name': pr.other_repo.repo_name,
             'org_repo_name': pr.org_repo.repo_name,
             'pr_nice_id': pr.nice_id(),
-            'pr_target_repo': h.canonical_url('summary_home',
+            'pr_target_repo': webutils.canonical_url('summary_home',
                                repo_name=pr.other_repo.repo_name),
             'pr_target_branch': other_ref_name,
-            'pr_source_repo': h.canonical_url('summary_home',
+            'pr_source_repo': webutils.canonical_url('summary_home',
                                repo_name=pr.org_repo.repo_name),
             'pr_source_branch': org_ref_name,
             'pr_owner': pr.owner,
@@ -102,20 +106,20 @@
             'is_mention': False,
             }
         if reviewers:
-            NotificationModel().create(created_by=user, subject=subject, body=body,
+            notification.NotificationModel().create(created_by=user, body=body,
                                        recipients=reviewers,
-                                       type_=NotificationModel.TYPE_PULL_REQUEST,
+                                       type_=notification.NotificationModel.TYPE_PULL_REQUEST,
                                        email_kwargs=email_kwargs)
 
         if mention_recipients:
             email_kwargs['is_mention'] = True
-            subject = _('[Mention]') + ' ' + subject
-            # FIXME: this subject is wrong and unused!
-            NotificationModel().create(created_by=user, subject=subject, body=body,
+            notification.NotificationModel().create(created_by=user, body=body,
                                        recipients=mention_recipients,
-                                       type_=NotificationModel.TYPE_PULL_REQUEST,
+                                       type_=notification.NotificationModel.TYPE_PULL_REQUEST,
                                        email_kwargs=email_kwargs)
 
+        return reviewers, redundant_reviewers
+
     def mention_from_description(self, user, pr, old_description=''):
         mention_recipients = (extract_mentioned_users(pr.description) -
                               extract_mentioned_users(old_description))
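
Together with the new UniqueConstraint on (pull_request_id, user_id) in db.py, add_reviewers() above now skips reviewers who are already attached to the pull request and reports them via the added (reviewers, redundant_reviewers) return value. The dedup itself is plain set arithmetic; a sketch with strings standing in for db.User objects:

    requested = {'alice', 'bob', 'carol'}  # reviewers passed to add_reviewers()
    already_on_pr = {'bob'}                # found via the PullRequestReviewer query
    redundant = requested & already_on_pr  # logged and returned as redundant_reviewers
    to_add = requested - redundant         # only these get new PullRequestReviewer rows
    assert sorted(to_add) == ['alice', 'carol']
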
@@ -128,14 +132,14 @@
         if not reviewers:
             return # avoid SQLAlchemy warning about empty sequence for IN-predicate
 
-        PullRequestReviewer.query() \
+        db.PullRequestReviewer.query() \
             .filter_by(pull_request=pull_request) \
-            .filter(PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \
+            .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \
             .delete(synchronize_session='fetch') # the default of 'evaluate' is not available
 
     def delete(self, pull_request):
-        pull_request = PullRequest.guess_instance(pull_request)
-        Session().delete(pull_request)
+        pull_request = db.PullRequest.guess_instance(pull_request)
+        meta.Session().delete(pull_request)
         if pull_request.org_repo.scm_instance.alias == 'git':
             # remove a ref under refs/pull/ so that commits can be garbage-collected
             try:
@@ -144,8 +148,8 @@
                 pass
 
     def close_pull_request(self, pull_request):
-        pull_request = PullRequest.guess_instance(pull_request)
-        pull_request.status = PullRequest.STATUS_CLOSED
+        pull_request = db.PullRequest.guess_instance(pull_request)
+        pull_request.status = db.PullRequest.STATUS_CLOSED
         pull_request.updated_on = datetime.datetime.now()
 
 
@@ -169,22 +173,21 @@
         information needed for such a check, rather than a full command
         object.
         """
-        if (h.HasRepoPermissionLevel('read')(org_repo.repo_name) and
-            h.HasRepoPermissionLevel('read')(other_repo.repo_name)
+        if (auth.HasRepoPermissionLevel('read')(org_repo.repo_name) and
+            auth.HasRepoPermissionLevel('read')(other_repo.repo_name)
         ):
             return True
 
         return False
 
     def __init__(self, org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers):
-        from kallithea.controllers.compare import CompareController
         reviewers = set(reviewers)
         _assert_valid_reviewers(reviewers)
 
         (org_ref_type,
          org_ref_name,
          org_rev) = org_ref.split(':')
-        org_display = h.short_ref(org_ref_type, org_ref_name)
+        org_display = short_ref_name(org_ref_type, org_ref_name)
         if org_ref_type == 'rev':
             cs = org_repo.scm_instance.get_changeset(org_rev)
             org_ref = 'branch:%s:%s' % (cs.branch, cs.raw_id)
@@ -196,13 +199,10 @@
             cs = other_repo.scm_instance.get_changeset(other_rev)
             other_ref_name = cs.raw_id[:12]
             other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, cs.raw_id)
-        other_display = h.short_ref(other_ref_type, other_ref_name)
+        other_display = short_ref_name(other_ref_type, other_ref_name)
 
         cs_ranges, _cs_ranges_not, ancestor_revs = \
-            CompareController._get_changesets(org_repo.scm_instance.alias,
-                                              other_repo.scm_instance, other_rev, # org and other "swapped"
-                                              org_repo.scm_instance, org_rev,
-                                              )
+            org_repo.scm_instance.get_diff_changesets(other_rev, org_repo.scm_instance, org_rev) # org and other "swapped"
         if not cs_ranges:
             raise self.Empty(_('Cannot create empty pull request'))
 
@@ -243,9 +243,9 @@
             raise self.Unauthorized(_('You are not authorized to create the pull request'))
 
     def execute(self):
-        created_by = User.get(request.authuser.user_id)
+        created_by = db.User.get(request.authuser.user_id)
 
-        pr = PullRequest()
+        pr = db.PullRequest()
         pr.org_repo = self.org_repo
         pr.org_ref = self.org_ref
         pr.other_repo = self.other_repo
@@ -254,35 +254,35 @@
         pr.title = self.title
         pr.description = self.description
         pr.owner = self.owner
-        Session().add(pr)
-        Session().flush() # make database assign pull_request_id
+        meta.Session().add(pr)
+        meta.Session().flush() # make database assign pull_request_id
 
         if self.org_repo.scm_instance.alias == 'git':
             # create a ref under refs/pull/ so that commits don't get garbage-collected
             self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev)
 
         # reset state to under-review
-        from kallithea.model.changeset_status import ChangesetStatusModel
-        from kallithea.model.comment import ChangesetCommentsModel
-        comment = ChangesetCommentsModel().create(
+        new_comment = comment.ChangesetCommentsModel().create(
             text='',
             repo=self.org_repo,
             author=created_by,
             pull_request=pr,
             send_email=False,
-            status_change=ChangesetStatus.STATUS_UNDER_REVIEW,
+            status_change=db.ChangesetStatus.STATUS_UNDER_REVIEW,
         )
-        ChangesetStatusModel().set_status(
+        changeset_status.ChangesetStatusModel().set_status(
             self.org_repo,
-            ChangesetStatus.STATUS_UNDER_REVIEW,
+            db.ChangesetStatus.STATUS_UNDER_REVIEW,
             created_by,
-            comment,
+            new_comment,
             pull_request=pr,
         )
 
         mention_recipients = extract_mentioned_users(self.description)
         PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients)
 
+        hooks.log_create_pullrequest(pr.get_dict(), created_by)
+
         return pr
 
 
@@ -293,7 +293,7 @@
         information needed for such a check, rather than a full command
         object.
         """
-        if h.HasPermissionAny('hg.admin')():
+        if auth.HasPermissionAny('hg.admin')():
             return True
 
         # Authorized to edit the old PR?
@@ -329,7 +329,7 @@
         lost = old_revisions.difference(revisions)
 
         infos = ['This is a new iteration of %s "%s".' %
-                 (h.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
+                 (webutils.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
                       pull_request_id=old_pull_request.pull_request_id),
                   old_pull_request.title)]
 
@@ -338,21 +338,21 @@
             for r in old_pull_request.revisions:
                 if r in lost:
                     rev_desc = org_repo.get_changeset(r).message.split('\n')[0]
-                    infos.append('  %s %s' % (h.short_id(r), rev_desc))
+                    infos.append('  %s %s' % (r[:12], rev_desc))
 
         if new_revisions:
             infos.append(_('New changesets on %s %s since the previous iteration:') % (org_ref_type, org_ref_name))
             for r in reversed(revisions):
                 if r in new_revisions:
                     rev_desc = org_repo.get_changeset(r).message.split('\n')[0]
-                    infos.append('  %s %s' % (h.short_id(r), h.shorter(rev_desc, 80)))
+                    infos.append('  %s %s' % (r[:12], webutils.shorter(rev_desc, 80)))
 
             if self.create_action.other_ref == old_pull_request.other_ref:
                 infos.append(_("Ancestor didn't change - diff since previous iteration:"))
-                infos.append(h.canonical_url('compare_url',
+                infos.append(webutils.canonical_url('compare_url',
                                  repo_name=org_repo.repo_name, # other_repo is always same as repo_name
-                                 org_ref_type='rev', org_ref_name=h.short_id(org_rev), # use old org_rev as base
-                                 other_ref_type='rev', other_ref_name=h.short_id(new_org_rev),
+                                 org_ref_type='rev', org_ref_name=org_rev[:12], # use old org_rev as base
+                                 other_ref_type='rev', other_ref_name=new_org_rev[:12],
                                  )) # note: linear diff, merge or not doesn't matter
             else:
                 infos.append(_('This iteration is based on another %s revision and there is no simple diff.') % other_ref_name)
@@ -381,8 +381,7 @@
         pull_request = self.create_action.execute()
 
         # Close old iteration
-        from kallithea.model.comment import ChangesetCommentsModel
-        ChangesetCommentsModel().create(
+        comment.ChangesetCommentsModel().create(
             text=_('Closed, next iteration: %s .') % pull_request.url(canonical=True),
             repo=self.old_pull_request.other_repo_id,
             author=request.authuser.user_id,
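
The hunks above consistently replace name-by-name model imports and the `h`
helper module with module-qualified references (db.*, meta.Session(), auth.*,
webutils.*, comment.*, notification.*), and add_reviewers now returns the sets
of added and redundant reviewers. A minimal sketch of the resulting calling
style, assuming the module path kallithea.model.pull_request and that the
final commit is the caller's responsibility (neither is shown in this hunk):

    from kallithea.model import meta
    from kallithea.model.pull_request import PullRequestModel  # module path assumed

    def close_and_commit(pull_request):
        # Accepts a db.PullRequest instance or its id (guess_instance resolves it),
        # sets the status to STATUS_CLOSED and stamps updated_on; commit here.
        PullRequestModel().close_pull_request(pull_request)
        meta.Session().commit()
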
--- a/kallithea/model/repo.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/repo.py	Thu May 27 21:27:37 2021 +0200
@@ -33,15 +33,13 @@
 from datetime import datetime
 
 import kallithea.lib.utils2
-from kallithea.lib import helpers as h
+from kallithea.lib import celerylib, hooks, webutils
 from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel
 from kallithea.lib.exceptions import AttachedForksError
-from kallithea.lib.hooks import log_delete_repository
 from kallithea.lib.utils import is_valid_repo_uri, make_ui
 from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix
 from kallithea.lib.vcs.backends import get_backend
-from kallithea.model.db import (URL_SEP, Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm,
-                                UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
+from kallithea.model import db, meta, scm, userlog
 
 
 log = logging.getLogger(__name__)
@@ -49,12 +47,10 @@
 
 class RepoModel(object):
 
-    URL_SEPARATOR = URL_SEP
-
     def _create_default_perms(self, repository, private):
         # create default permission
         default = 'repository.read'
-        def_user = User.get_default_user()
+        def_user = db.User.get_default_user()
         for p in def_user.user_perms:
             if p.permission.permission_name.startswith('repository.'):
                 default = p.permission.permission_name
@@ -62,12 +58,12 @@
 
         default_perm = 'repository.none' if private else default
 
-        repo_to_perm = UserRepoToPerm()
-        repo_to_perm.permission = Permission.get_by_key(default_perm)
+        repo_to_perm = db.UserRepoToPerm()
+        repo_to_perm.permission = db.Permission.get_by_key(default_perm)
 
         repo_to_perm.repository = repository
         repo_to_perm.user_id = def_user.user_id
-        Session().add(repo_to_perm)
+        meta.Session().add(repo_to_perm)
 
         return repo_to_perm
 
@@ -77,47 +73,39 @@
         Gets the repositories root path from database
         """
 
-        q = Ui.query().filter(Ui.ui_key == '/').one()
+        q = db.Ui.query().filter(db.Ui.ui_key == '/').one()
         return q.ui_value
 
     def get(self, repo_id):
-        repo = Repository.query() \
-            .filter(Repository.repo_id == repo_id)
+        repo = db.Repository.query() \
+            .filter(db.Repository.repo_id == repo_id)
         return repo.scalar()
 
     def get_repo(self, repository):
-        return Repository.guess_instance(repository)
+        return db.Repository.guess_instance(repository)
 
     def get_by_repo_name(self, repo_name):
-        repo = Repository.query() \
-            .filter(Repository.repo_name == repo_name)
+        repo = db.Repository.query() \
+            .filter(db.Repository.repo_name == repo_name)
         return repo.scalar()
 
-    def get_all_user_repos(self, user):
-        """
-        Gets all repositories that user have at least read access
-
-        :param user:
-        """
-        from kallithea.lib.auth import AuthUser
-        auth_user = AuthUser(dbuser=User.guess_instance(user))
-        repos = [repo_name
-            for repo_name, perm in auth_user.permissions['repositories'].items()
-            if perm in ['repository.read', 'repository.write', 'repository.admin']
-            ]
-        return Repository.query().filter(Repository.repo_name.in_(repos))
-
     @classmethod
     def _render_datatable(cls, tmpl, *args, **kwargs):
-        from tg import tmpl_context as c, request, app_globals
+        from tg import app_globals, request
+        from tg import tmpl_context as c
         from tg.i18n import ugettext as _
 
         _tmpl_lookup = app_globals.mako_lookup
         template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 
         tmpl = template.get_def(tmpl)
-        kwargs.update(dict(_=_, h=h, c=c, request=request))
-        return tmpl.render_unicode(*args, **kwargs)
+        return tmpl.render_unicode(
+            *args,
+            _=_,
+            webutils=webutils,
+            c=c,
+            request=request,
+            **kwargs)
 
     def get_repos_as_dict(self, repos_list, repo_groups_list=None,
                           admin=False,
@@ -128,8 +116,8 @@
         admin: return data for action column.
         """
         _render = self._render_datatable
-        from tg import tmpl_context as c, request
-        from kallithea.model.scm import ScmModel
+        from tg import request
+        from tg import tmpl_context as c
 
         def repo_lnk(name, rtype, rstate, private, fork_of):
             return _render('repo_name', name, rtype, rstate, private, fork_of,
@@ -153,7 +141,7 @@
                            cs_cache.get('message'))
 
         def desc(desc):
-            return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels)
+            return webutils.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels)
 
         def state(repo_state):
             return _render("repo_state", repo_state)
@@ -168,8 +156,8 @@
 
         for gr in repo_groups_list or []:
             repos_data.append(dict(
-                raw_name='\0' + h.html_escape(gr.name), # sort before repositories
-                just_name=h.html_escape(gr.name),
+                raw_name='\0' + webutils.html_escape(gr.name),  # sort before repositories
+                just_name=webutils.html_escape(gr.name),
                 name=_render('group_name_html', group_name=gr.group_name, name=gr.name),
                 desc=desc(gr.group_description)))
 
@@ -178,20 +166,20 @@
                 continue
             cs_cache = repo.changeset_cache
             row = {
-                "raw_name": h.html_escape(repo.repo_name),
-                "just_name": h.html_escape(repo.just_name),
+                "raw_name": webutils.html_escape(repo.repo_name),
+                "just_name": webutils.html_escape(repo.just_name),
                 "name": repo_lnk(repo.repo_name, repo.repo_type,
                                  repo.repo_state, repo.private, repo.fork),
                 "following": following(
                     repo.repo_id,
-                    ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
+                    scm.ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
                 ),
                 "last_change_iso": repo.last_db_change.isoformat(),
                 "last_change": last_change(repo.last_db_change),
                 "last_changeset": last_rev(repo.repo_name, cs_cache),
                 "last_rev_raw": cs_cache.get('revision'),
                 "desc": desc(repo.description),
-                "owner": h.person(repo.owner),
+                "owner": repo.owner.username,
                 "state": state(repo.repo_state),
                 "rss": rss_lnk(repo.repo_name),
                 "atom": atom_lnk(repo.repo_name),
@@ -199,8 +187,7 @@
             if admin:
                 row.update({
                     "action": repo_actions(repo.repo_name),
-                    "owner": owner_actions(repo.owner_id,
-                                           h.person(repo.owner))
+                    "owner": owner_actions(repo.owner_id, repo.owner.username)
                 })
             repos_data.append(row)
 
@@ -218,7 +205,7 @@
         :param repo_name:
         """
 
-        repo_info = Repository.get_by_repo_name(repo_name)
+        repo_info = db.Repository.get_by_repo_name(repo_name)
 
         if repo_info is None:
             return None
@@ -246,7 +233,7 @@
         if repo_info.owner:
             defaults.update({'owner': repo_info.owner.username})
         else:
-            replacement_user = User.query().filter(User.admin ==
+            replacement_user = db.User.query().filter(db.User.admin ==
                                                    True).first().username
             defaults.update({'owner': replacement_user})
 
@@ -264,14 +251,14 @@
 
     def update(self, repo, **kwargs):
         try:
-            cur_repo = Repository.guess_instance(repo)
+            cur_repo = db.Repository.guess_instance(repo)
             org_repo_name = cur_repo.repo_name
             if 'owner' in kwargs:
-                cur_repo.owner = User.get_by_username(kwargs['owner'])
+                cur_repo.owner = db.User.get_by_username(kwargs['owner'])
 
             if 'repo_group' in kwargs:
                 assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None
-                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
+                cur_repo.group = db.RepoGroup.get(kwargs['repo_group'])
                 cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
             log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
             for k in ['repo_enable_downloads',
@@ -303,9 +290,9 @@
                     repo=cur_repo, user='default', perm=EMPTY_PERM
                 )
                 # handle extra fields
-            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
-                k = RepositoryField.un_prefix_key(field)
-                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
+            for field in [k for k in kwargs if k.startswith(db.RepositoryField.PREFIX)]:
+                k = db.RepositoryField.un_prefix_key(field)
+                ex_field = db.RepositoryField.get_by_key_name(key=k, repo=cur_repo)
                 if ex_field:
                     ex_field.field_value = kwargs[field]
 
@@ -323,29 +310,25 @@
                      landing_rev='rev:tip', fork_of=None,
                      copy_fork_permissions=False, enable_statistics=False,
                      enable_downloads=False,
-                     copy_group_permissions=False, state=Repository.STATE_PENDING):
+                     copy_group_permissions=False, state=db.Repository.STATE_PENDING):
         """
         Create a repository inside the database with PENDING state. This should only be
         executed by create(), with the exception of importing existing repos.
 
         """
-        from kallithea.model.scm import ScmModel
-
-        owner = User.guess_instance(owner)
-        fork_of = Repository.guess_instance(fork_of)
-        repo_group = RepoGroup.guess_instance(repo_group)
+        owner = db.User.guess_instance(owner)
+        fork_of = db.Repository.guess_instance(fork_of)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
         try:
-            repo_name = repo_name
-            description = description
             # repo name is just a name of repository
             # while repo_name_full is a full qualified name that is combined
             # with name and path of group
             repo_name_full = repo_name
-            repo_name = repo_name.split(URL_SEP)[-1]
+            repo_name = repo_name.split(kallithea.URL_SEP)[-1]
             if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
                 raise Exception('invalid repo name %s' % repo_name)
 
-            new_repo = Repository()
+            new_repo = db.Repository()
             new_repo.repo_state = state
             new_repo.enable_statistics = False
             new_repo.repo_name = repo_name_full
@@ -367,48 +350,48 @@
                 parent_repo = fork_of
                 new_repo.fork = parent_repo
 
-            Session().add(new_repo)
+            meta.Session().add(new_repo)
 
             if fork_of and copy_fork_permissions:
                 repo = fork_of
-                user_perms = UserRepoToPerm.query() \
-                    .filter(UserRepoToPerm.repository == repo).all()
-                group_perms = UserGroupRepoToPerm.query() \
-                    .filter(UserGroupRepoToPerm.repository == repo).all()
+                user_perms = db.UserRepoToPerm.query() \
+                    .filter(db.UserRepoToPerm.repository == repo).all()
+                group_perms = db.UserGroupRepoToPerm.query() \
+                    .filter(db.UserGroupRepoToPerm.repository == repo).all()
 
                 for perm in user_perms:
-                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
+                    db.UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 
                 for perm in group_perms:
-                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
+                    db.UserGroupRepoToPerm.create(perm.users_group, new_repo,
                                                perm.permission)
 
             elif repo_group and copy_group_permissions:
 
-                user_perms = UserRepoGroupToPerm.query() \
-                    .filter(UserRepoGroupToPerm.group == repo_group).all()
+                user_perms = db.UserRepoGroupToPerm.query() \
+                    .filter(db.UserRepoGroupToPerm.group == repo_group).all()
 
-                group_perms = UserGroupRepoGroupToPerm.query() \
-                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
+                group_perms = db.UserGroupRepoGroupToPerm.query() \
+                    .filter(db.UserGroupRepoGroupToPerm.group == repo_group).all()
 
                 for perm in user_perms:
                     perm_name = perm.permission.permission_name.replace('group.', 'repository.')
-                    perm_obj = Permission.get_by_key(perm_name)
-                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
+                    perm_obj = db.Permission.get_by_key(perm_name)
+                    db.UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 
                 for perm in group_perms:
                     perm_name = perm.permission.permission_name.replace('group.', 'repository.')
-                    perm_obj = Permission.get_by_key(perm_name)
-                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
+                    perm_obj = db.Permission.get_by_key(perm_name)
+                    db.UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 
             else:
                 self._create_default_perms(new_repo, private)
 
             # now automatically start following this repository as owner
-            ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id)
+            scm.ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id)
             # we need to flush here, in order to check if database won't
             # throw any exceptions, create filesystem dirs at the very end
-            Session().flush()
+            meta.Session().flush()
             return new_repo
         except Exception:
             log.error(traceback.format_exc())
@@ -421,8 +404,7 @@
         :param form_data:
         :param cur_user:
         """
-        from kallithea.lib.celerylib import tasks
-        return tasks.create_repo(form_data, cur_user)
+        return create_repo(form_data, cur_user)
 
     def _update_permissions(self, repo, perms_new=None, perms_updates=None,
                             check_perms=True):
@@ -464,8 +446,7 @@
         :param form_data:
         :param cur_user:
         """
-        from kallithea.lib.celerylib import tasks
-        return tasks.create_repo_fork(form_data, cur_user)
+        return create_repo_fork(form_data, cur_user)
 
     def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
         """
@@ -479,7 +460,7 @@
         """
         if not cur_user:
             cur_user = getattr(get_current_authuser(), 'username', None)
-        repo = Repository.guess_instance(repo)
+        repo = db.Repository.guess_instance(repo)
         if repo is not None:
             if forks == 'detach':
                 for r in repo.forks:
@@ -492,12 +473,12 @@
 
             old_repo_dict = repo.get_dict()
             try:
-                Session().delete(repo)
+                meta.Session().delete(repo)
                 if fs_remove:
                     self._delete_filesystem_repo(repo)
                 else:
                     log.debug('skipping removal from filesystem')
-                log_delete_repository(old_repo_dict,
+                hooks.log_delete_repository(old_repo_dict,
                                       deleted_by=cur_user)
             except Exception:
                 log.error(traceback.format_exc())
@@ -512,19 +493,19 @@
         :param user: Instance of User, user_id or username
         :param perm: Instance of Permission, or permission_name
         """
-        user = User.guess_instance(user)
-        repo = Repository.guess_instance(repo)
-        permission = Permission.guess_instance(perm)
+        user = db.User.guess_instance(user)
+        repo = db.Repository.guess_instance(repo)
+        permission = db.Permission.guess_instance(perm)
 
         # check if we have that permission already
-        obj = UserRepoToPerm.query() \
-            .filter(UserRepoToPerm.user == user) \
-            .filter(UserRepoToPerm.repository == repo) \
+        obj = db.UserRepoToPerm.query() \
+            .filter(db.UserRepoToPerm.user == user) \
+            .filter(db.UserRepoToPerm.repository == repo) \
             .scalar()
         if obj is None:
             # create new !
-            obj = UserRepoToPerm()
-            Session().add(obj)
+            obj = db.UserRepoToPerm()
+            meta.Session().add(obj)
         obj.repository = repo
         obj.user = user
         obj.permission = permission
@@ -539,15 +520,15 @@
         :param user: Instance of User, user_id or username
         """
 
-        user = User.guess_instance(user)
-        repo = Repository.guess_instance(repo)
+        user = db.User.guess_instance(user)
+        repo = db.Repository.guess_instance(repo)
 
-        obj = UserRepoToPerm.query() \
-            .filter(UserRepoToPerm.repository == repo) \
-            .filter(UserRepoToPerm.user == user) \
+        obj = db.UserRepoToPerm.query() \
+            .filter(db.UserRepoToPerm.repository == repo) \
+            .filter(db.UserRepoToPerm.user == user) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm on %s on %s', repo, user)
 
     def grant_user_group_permission(self, repo, group_name, perm):
@@ -560,20 +541,20 @@
             or user group name
         :param perm: Instance of Permission, or permission_name
         """
-        repo = Repository.guess_instance(repo)
-        group_name = UserGroup.guess_instance(group_name)
-        permission = Permission.guess_instance(perm)
+        repo = db.Repository.guess_instance(repo)
+        group_name = db.UserGroup.guess_instance(group_name)
+        permission = db.Permission.guess_instance(perm)
 
         # check if we have that permission already
-        obj = UserGroupRepoToPerm.query() \
-            .filter(UserGroupRepoToPerm.users_group == group_name) \
-            .filter(UserGroupRepoToPerm.repository == repo) \
+        obj = db.UserGroupRepoToPerm.query() \
+            .filter(db.UserGroupRepoToPerm.users_group == group_name) \
+            .filter(db.UserGroupRepoToPerm.repository == repo) \
             .scalar()
 
         if obj is None:
             # create new
-            obj = UserGroupRepoToPerm()
-            Session().add(obj)
+            obj = db.UserGroupRepoToPerm()
+            meta.Session().add(obj)
 
         obj.repository = repo
         obj.users_group = group_name
@@ -589,15 +570,15 @@
         :param group_name: Instance of UserGroup, users_group_id,
             or user group name
         """
-        repo = Repository.guess_instance(repo)
-        group_name = UserGroup.guess_instance(group_name)
+        repo = db.Repository.guess_instance(repo)
+        group_name = db.UserGroup.guess_instance(group_name)
 
-        obj = UserGroupRepoToPerm.query() \
-            .filter(UserGroupRepoToPerm.repository == repo) \
-            .filter(UserGroupRepoToPerm.users_group == group_name) \
+        obj = db.UserGroupRepoToPerm.query() \
+            .filter(db.UserGroupRepoToPerm.repository == repo) \
+            .filter(db.UserGroupRepoToPerm.users_group == group_name) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm to %s on %s', repo, group_name)
 
     def delete_stats(self, repo_name):
@@ -606,12 +587,12 @@
 
         :param repo_name:
         """
-        repo = Repository.guess_instance(repo_name)
+        repo = db.Repository.guess_instance(repo_name)
         try:
-            obj = Statistics.query() \
-                .filter(Statistics.repository == repo).scalar()
+            obj = db.Statistics.query() \
+                .filter(db.Statistics.repository == repo).scalar()
             if obj is not None:
-                Session().delete(obj)
+                meta.Session().delete(obj)
         except Exception:
             log.error(traceback.format_exc())
             raise
@@ -625,12 +606,11 @@
         Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning
         """
         from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
-        from kallithea.model.scm import ScmModel
 
         if '/' in repo_name:
             raise ValueError('repo_name must not contain groups got `%s`' % repo_name)
 
-        if isinstance(repo_group, RepoGroup):
+        if isinstance(repo_group, db.RepoGroup):
             new_parent_path = os.sep.join(repo_group.full_path_splitted)
         else:
             new_parent_path = repo_group or ''
@@ -666,7 +646,7 @@
         elif repo_type == 'git':
             repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
             # add kallithea hook into this repo
-            ScmModel().install_git_hooks(repo=repo)
+            scm.ScmModel().install_git_hooks(repo)
         else:
             raise Exception('Not supported repo_type %s expected hg/git' % repo_type)
 
@@ -713,3 +693,147 @@
             shutil.move(rm_path, os.path.join(self.repos_path, _d))
         else:
             log.error("Can't find repo to delete in %r", rm_path)
+
+
+@celerylib.task
+def create_repo(form_data, cur_user):
+    cur_user = db.User.guess_instance(cur_user)
+
+    owner = cur_user
+    repo_name = form_data['repo_name']
+    repo_name_full = form_data['repo_name_full']
+    repo_type = form_data['repo_type']
+    description = form_data['repo_description']
+    private = form_data['repo_private']
+    clone_uri = form_data.get('clone_uri')
+    repo_group = form_data['repo_group']
+    landing_rev = form_data['repo_landing_rev']
+    copy_fork_permissions = form_data.get('copy_permissions')
+    copy_group_permissions = form_data.get('repo_copy_permissions')
+    fork_of = form_data.get('fork_parent_id')
+    state = form_data.get('repo_state', db.Repository.STATE_PENDING)
+
+    # repo creation defaults, private and repo_type are filled in form
+    defs = db.Setting.get_default_repo_settings(strip_prefix=True)
+    enable_statistics = defs.get('repo_enable_statistics')
+    enable_downloads = defs.get('repo_enable_downloads')
+
+    try:
+        db_repo = RepoModel()._create_repo(
+            repo_name=repo_name_full,
+            repo_type=repo_type,
+            description=description,
+            owner=owner,
+            private=private,
+            clone_uri=clone_uri,
+            repo_group=repo_group,
+            landing_rev=landing_rev,
+            fork_of=fork_of,
+            copy_fork_permissions=copy_fork_permissions,
+            copy_group_permissions=copy_group_permissions,
+            enable_statistics=enable_statistics,
+            enable_downloads=enable_downloads,
+            state=state
+        )
+
+        userlog.action_logger(cur_user, 'user_created_repo',
+                      form_data['repo_name_full'], '')
+
+        meta.Session().commit()
+        # now create this repo on Filesystem
+        RepoModel()._create_filesystem_repo(
+            repo_name=repo_name,
+            repo_type=repo_type,
+            repo_group=db.RepoGroup.guess_instance(repo_group),
+            clone_uri=clone_uri,
+        )
+        db_repo = db.Repository.get_by_repo_name(repo_name_full)
+        hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username)
+
+        # update repo changeset caches initially
+        db_repo.update_changeset_cache()
+
+        # set new created state
+        db_repo.set_state(db.Repository.STATE_CREATED)
+        meta.Session().commit()
+    except Exception as e:
+        log.warning('Exception %s occurred when creating repository, '
+                    'doing cleanup...' % e)
+        # rollback things manually !
+        db_repo = db.Repository.get_by_repo_name(repo_name_full)
+        if db_repo:
+            db.Repository.delete(db_repo.repo_id)
+            meta.Session().commit()
+            RepoModel()._delete_filesystem_repo(db_repo)
+        raise
+
+
+@celerylib.task
+def create_repo_fork(form_data, cur_user):
+    """
+    Creates a fork of a repository using internal VCS methods
+
+    :param form_data:
+    :param cur_user:
+    """
+    base_path = kallithea.CONFIG['base_path']
+    cur_user = db.User.guess_instance(cur_user)
+
+    repo_name = form_data['repo_name']  # fork in this case
+    repo_name_full = form_data['repo_name_full']
+
+    repo_type = form_data['repo_type']
+    owner = cur_user
+    private = form_data['private']
+    clone_uri = form_data.get('clone_uri')
+    repo_group = form_data['repo_group']
+    landing_rev = form_data['landing_rev']
+    copy_fork_permissions = form_data.get('copy_permissions')
+
+    try:
+        fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id'))
+
+        RepoModel()._create_repo(
+            repo_name=repo_name_full,
+            repo_type=repo_type,
+            description=form_data['description'],
+            owner=owner,
+            private=private,
+            clone_uri=clone_uri,
+            repo_group=repo_group,
+            landing_rev=landing_rev,
+            fork_of=fork_of,
+            copy_fork_permissions=copy_fork_permissions
+        )
+        userlog.action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
+                      fork_of.repo_name, '')
+        meta.Session().commit()
+
+        source_repo_path = os.path.join(base_path, fork_of.repo_name)
+
+        # now create this repo on Filesystem
+        RepoModel()._create_filesystem_repo(
+            repo_name=repo_name,
+            repo_type=repo_type,
+            repo_group=db.RepoGroup.guess_instance(repo_group),
+            clone_uri=source_repo_path,
+        )
+        db_repo = db.Repository.get_by_repo_name(repo_name_full)
+        hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username)
+
+        # update repo changeset caches initially
+        db_repo.update_changeset_cache()
+
+        # set new created state
+        db_repo.set_state(db.Repository.STATE_CREATED)
+        meta.Session().commit()
+    except Exception as e:
+        log.warning('Exception %s occurred when forking repository, '
+                    'doing cleanup...' % e)
+        # rollback things manually !
+        db_repo = db.Repository.get_by_repo_name(repo_name_full)
+        if db_repo:
+            db.Repository.delete(db_repo.repo_id)
+            meta.Session().commit()
+            RepoModel()._delete_filesystem_repo(db_repo)
+        raise
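
With this hunk, create_repo and create_repo_fork become celerylib.task
functions in this module, and the RepoModel methods shown earlier now simply
delegate to them. A hedged sketch of driving the delegating call with a
hand-built form_data dict; the keys mirror what create_repo() reads above,
while the method name create and passing a plain username as cur_user are
assumptions (in practice the dict comes from the validated repository form):

    from kallithea.model.repo import RepoModel

    form_data = {
        'repo_name': 'demo',                # short name inside the group
        'repo_name_full': 'projects/demo',  # group path plus name
        'repo_type': 'hg',
        'repo_description': 'demo repository',
        'repo_private': False,
        'repo_group': None,                 # or a RepoGroup id/instance
        'repo_landing_rev': 'rev:tip',
    }
    # Delegates to the create_repo task: creates the database row, commits,
    # creates the filesystem repo, fires the creation hook and finally marks
    # the repository STATE_CREATED.
    RepoModel().create(form_data, cur_user='admin')
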
--- a/kallithea/model/repo_group.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/repo_group.py	Thu May 27 21:27:37 2021 +0200
@@ -34,8 +34,7 @@
 
 import kallithea.lib.utils2
 from kallithea.lib.utils2 import LazyProperty
-from kallithea.model import db
-from kallithea.model.db import Permission, RepoGroup, Repository, Session, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserRepoGroupToPerm
+from kallithea.model import db, meta, repo
 
 
 log = logging.getLogger(__name__)
@@ -49,24 +48,24 @@
         Gets the repositories root path from database
         """
 
-        q = Ui.get_by_key('paths', '/')
+        q = db.Ui.get_by_key('paths', '/')
         return q.ui_value
 
     def _create_default_perms(self, new_group):
         # create default permission
         default_perm = 'group.read'
-        def_user = User.get_default_user()
+        def_user = db.User.get_default_user()
         for p in def_user.user_perms:
             if p.permission.permission_name.startswith('group.'):
                 default_perm = p.permission.permission_name
                 break
 
-        repo_group_to_perm = UserRepoGroupToPerm()
-        repo_group_to_perm.permission = Permission.get_by_key(default_perm)
+        repo_group_to_perm = db.UserRepoGroupToPerm()
+        repo_group_to_perm.permission = db.Permission.get_by_key(default_perm)
 
         repo_group_to_perm.group = new_group
         repo_group_to_perm.user_id = def_user.user_id
-        Session().add(repo_group_to_perm)
+        meta.Session().add(repo_group_to_perm)
         return repo_group_to_perm
 
     def _create_group(self, group_name):
@@ -116,7 +115,7 @@
         :param group: instance of group from database
         :param force_delete: use shutil rmtree to remove all objects
         """
-        paths = group.full_path.split(db.URL_SEP)
+        paths = group.full_path.split(kallithea.URL_SEP)
         paths = os.sep.join(paths)
 
         rm_path = os.path.join(self.repos_path, paths)
@@ -139,15 +138,15 @@
             if kallithea.lib.utils2.repo_name_slug(group_name) != group_name:
                 raise Exception('invalid repo group name %s' % group_name)
 
-            owner = User.guess_instance(owner)
-            parent_group = RepoGroup.guess_instance(parent)
-            new_repo_group = RepoGroup()
+            owner = db.User.guess_instance(owner)
+            parent_group = db.RepoGroup.guess_instance(parent)
+            new_repo_group = db.RepoGroup()
             new_repo_group.owner = owner
             new_repo_group.group_description = group_description or group_name
             new_repo_group.parent_group = parent_group
             new_repo_group.group_name = new_repo_group.get_new_name(group_name)
 
-            Session().add(new_repo_group)
+            meta.Session().add(new_repo_group)
 
             # create an ADMIN permission for owner except if we're super admin,
             # later owner should go into the owner field of groups
@@ -157,28 +156,28 @@
 
             if parent_group and copy_permissions:
                 # copy permissions from parent
-                user_perms = UserRepoGroupToPerm.query() \
-                    .filter(UserRepoGroupToPerm.group == parent_group).all()
+                user_perms = db.UserRepoGroupToPerm.query() \
+                    .filter(db.UserRepoGroupToPerm.group == parent_group).all()
 
-                group_perms = UserGroupRepoGroupToPerm.query() \
-                    .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
+                group_perms = db.UserGroupRepoGroupToPerm.query() \
+                    .filter(db.UserGroupRepoGroupToPerm.group == parent_group).all()
 
                 for perm in user_perms:
                     # don't copy over the permission for user who is creating
                     # this group, if he is not super admin he gets admin
                     # permission set above
                     if perm.user != owner or owner.is_admin:
-                        UserRepoGroupToPerm.create(perm.user, new_repo_group, perm.permission)
+                        db.UserRepoGroupToPerm.create(perm.user, new_repo_group, perm.permission)
 
                 for perm in group_perms:
-                    UserGroupRepoGroupToPerm.create(perm.users_group, new_repo_group, perm.permission)
+                    db.UserGroupRepoGroupToPerm.create(perm.users_group, new_repo_group, perm.permission)
             else:
                 self._create_default_perms(new_repo_group)
 
             if not just_db:
                 # we need to flush here, in order to check if database won't
                 # throw any exceptions, create filesystem dirs at the very end
-                Session().flush()
+                meta.Session().flush()
                 self._create_group(new_repo_group.group_name)
 
             return new_repo_group
@@ -189,7 +188,6 @@
     def _update_permissions(self, repo_group, perms_new=None,
                             perms_updates=None, recursive=None,
                             check_perms=True):
-        from kallithea.model.repo import RepoModel
         from kallithea.lib.auth import HasUserGroupPermissionLevel
 
         if not perms_new:
@@ -198,10 +196,10 @@
             perms_updates = []
 
         def _set_perm_user(obj, user, perm):
-            if isinstance(obj, RepoGroup):
+            if isinstance(obj, db.RepoGroup):
                 self.grant_user_permission(repo_group=obj, user=user, perm=perm)
-            elif isinstance(obj, Repository):
-                user = User.guess_instance(user)
+            elif isinstance(obj, db.Repository):
+                user = db.User.guess_instance(user)
 
                 # private repos will not allow to change the default permissions
                 # using recursive mode
@@ -211,20 +209,20 @@
                 # we set group permission but we have to switch to repo
                 # permission
                 perm = perm.replace('group.', 'repository.')
-                RepoModel().grant_user_permission(
+                repo.RepoModel().grant_user_permission(
                     repo=obj, user=user, perm=perm
                 )
 
         def _set_perm_group(obj, users_group, perm):
-            if isinstance(obj, RepoGroup):
+            if isinstance(obj, db.RepoGroup):
                 self.grant_user_group_permission(repo_group=obj,
                                                   group_name=users_group,
                                                   perm=perm)
-            elif isinstance(obj, Repository):
+            elif isinstance(obj, db.Repository):
                 # we set group permission but we have to switch to repo
                 # permission
                 perm = perm.replace('group.', 'repository.')
-                RepoModel().grant_user_group_permission(
+                repo.RepoModel().grant_user_group_permission(
                     repo=obj, group_name=users_group, perm=perm
                 )
 
@@ -240,11 +238,11 @@
                 pass
             elif recursive == 'repos':
                 # skip groups, other than this one
-                if isinstance(obj, RepoGroup) and not obj == repo_group:
+                if isinstance(obj, db.RepoGroup) and not obj == repo_group:
                     continue
             elif recursive == 'groups':
                 # skip repos
-                if isinstance(obj, Repository):
+                if isinstance(obj, db.Repository):
                     continue
             else:  # recursive == 'none': # DEFAULT don't apply to iterated objects
                 obj = repo_group
@@ -279,7 +277,7 @@
 
     def update(self, repo_group, repo_group_args):
         try:
-            repo_group = RepoGroup.guess_instance(repo_group)
+            repo_group = db.RepoGroup.guess_instance(repo_group)
             old_path = repo_group.full_path
 
             # change properties
@@ -290,14 +288,14 @@
 
             if 'parent_group_id' in repo_group_args:
                 assert repo_group_args['parent_group_id'] != '-1', repo_group_args  # RepoGroupForm should have converted to None
-                repo_group.parent_group = RepoGroup.get(repo_group_args['parent_group_id'])
+                repo_group.parent_group = db.RepoGroup.get(repo_group_args['parent_group_id'])
             if 'group_name' in repo_group_args:
                 group_name = repo_group_args['group_name']
                 if kallithea.lib.utils2.repo_name_slug(group_name) != group_name:
                     raise Exception('invalid repo group name %s' % group_name)
                 repo_group.group_name = repo_group.get_new_name(group_name)
             new_path = repo_group.full_path
-            Session().add(repo_group)
+            meta.Session().add(repo_group)
 
             # iterate over all members of this groups and do fixes
             # if obj is a repoGroup also fix the name of the group according
@@ -306,12 +304,12 @@
             # this can be potentially heavy operation
             for obj in repo_group.recursive_groups_and_repos():
                 # set the value from it's parent
-                if isinstance(obj, RepoGroup):
+                if isinstance(obj, db.RepoGroup):
                     new_name = obj.get_new_name(obj.name)
                     log.debug('Fixing group %s to new name %s'
                                 % (obj.group_name, new_name))
                     obj.group_name = new_name
-                elif isinstance(obj, Repository):
+                elif isinstance(obj, db.Repository):
                     # we need to get all repositories from this new group and
                     # rename them accordingly to new group path
                     new_name = obj.get_new_name(obj.just_name)
@@ -327,18 +325,17 @@
             raise
 
     def delete(self, repo_group, force_delete=False):
-        repo_group = RepoGroup.guess_instance(repo_group)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
         try:
-            Session().delete(repo_group)
+            meta.Session().delete(repo_group)
             self._delete_group(repo_group, force_delete)
         except Exception:
             log.error('Error removing repo_group %s', repo_group)
             raise
 
     def add_permission(self, repo_group, obj, obj_type, perm, recursive):
-        from kallithea.model.repo import RepoModel
-        repo_group = RepoGroup.guess_instance(repo_group)
-        perm = Permission.guess_instance(perm)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
+        perm = db.Permission.guess_instance(perm)
 
         for el in repo_group.recursive_groups_and_repos():
             # iterated obj is an instance of a repos group or repository in
@@ -347,30 +344,30 @@
                 pass
             elif recursive == 'repos':
                 # skip groups, other than this one
-                if isinstance(el, RepoGroup) and not el == repo_group:
+                if isinstance(el, db.RepoGroup) and not el == repo_group:
                     continue
             elif recursive == 'groups':
                 # skip repos
-                if isinstance(el, Repository):
+                if isinstance(el, db.Repository):
                     continue
             else:  # recursive == 'none': # DEFAULT don't apply to iterated objects
                 el = repo_group
                 # also we do a break at the end of this loop.
 
-            if isinstance(el, RepoGroup):
+            if isinstance(el, db.RepoGroup):
                 if obj_type == 'user':
                     RepoGroupModel().grant_user_permission(el, user=obj, perm=perm)
                 elif obj_type == 'user_group':
                     RepoGroupModel().grant_user_group_permission(el, group_name=obj, perm=perm)
                 else:
                     raise Exception('undefined object type %s' % obj_type)
-            elif isinstance(el, Repository):
+            elif isinstance(el, db.Repository):
                 # for repos we need to hotfix the name of permission
                 _perm = perm.permission_name.replace('group.', 'repository.')
                 if obj_type == 'user':
-                    RepoModel().grant_user_permission(el, user=obj, perm=_perm)
+                    repo.RepoModel().grant_user_permission(el, user=obj, perm=_perm)
                 elif obj_type == 'user_group':
-                    RepoModel().grant_user_group_permission(el, group_name=obj, perm=_perm)
+                    repo.RepoModel().grant_user_group_permission(el, group_name=obj, perm=_perm)
                 else:
                     raise Exception('undefined object type %s' % obj_type)
             else:
@@ -392,8 +389,7 @@
         :param obj_type: user or user group type
         :param recursive: recurse to all children of group
         """
-        from kallithea.model.repo import RepoModel
-        repo_group = RepoGroup.guess_instance(repo_group)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
 
         for el in repo_group.recursive_groups_and_repos():
             # iterated obj is an instance of a repos group or repository in
@@ -402,28 +398,28 @@
                 pass
             elif recursive == 'repos':
                 # skip groups, other than this one
-                if isinstance(el, RepoGroup) and not el == repo_group:
+                if isinstance(el, db.RepoGroup) and not el == repo_group:
                     continue
             elif recursive == 'groups':
                 # skip repos
-                if isinstance(el, Repository):
+                if isinstance(el, db.Repository):
                     continue
             else:  # recursive == 'none': # DEFAULT don't apply to iterated objects
                 el = repo_group
                 # also we do a break at the end of this loop.
 
-            if isinstance(el, RepoGroup):
+            if isinstance(el, db.RepoGroup):
                 if obj_type == 'user':
                     RepoGroupModel().revoke_user_permission(el, user=obj)
                 elif obj_type == 'user_group':
                     RepoGroupModel().revoke_user_group_permission(el, group_name=obj)
                 else:
                     raise Exception('undefined object type %s' % obj_type)
-            elif isinstance(el, Repository):
+            elif isinstance(el, db.Repository):
                 if obj_type == 'user':
-                    RepoModel().revoke_user_permission(el, user=obj)
+                    repo.RepoModel().revoke_user_permission(el, user=obj)
                 elif obj_type == 'user_group':
-                    RepoModel().revoke_user_group_permission(el, group_name=obj)
+                    repo.RepoModel().revoke_user_group_permission(el, group_name=obj)
                 else:
                     raise Exception('undefined object type %s' % obj_type)
             else:
@@ -446,19 +442,19 @@
         :param perm: Instance of Permission, or permission_name
         """
 
-        repo_group = RepoGroup.guess_instance(repo_group)
-        user = User.guess_instance(user)
-        permission = Permission.guess_instance(perm)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
+        user = db.User.guess_instance(user)
+        permission = db.Permission.guess_instance(perm)
 
         # check if we have that permission already
-        obj = UserRepoGroupToPerm.query() \
-            .filter(UserRepoGroupToPerm.user == user) \
-            .filter(UserRepoGroupToPerm.group == repo_group) \
+        obj = db.UserRepoGroupToPerm.query() \
+            .filter(db.UserRepoGroupToPerm.user == user) \
+            .filter(db.UserRepoGroupToPerm.group == repo_group) \
             .scalar()
         if obj is None:
             # create new !
-            obj = UserRepoGroupToPerm()
-            Session().add(obj)
+            obj = db.UserRepoGroupToPerm()
+            meta.Session().add(obj)
         obj.group = repo_group
         obj.user = user
         obj.permission = permission
@@ -474,15 +470,15 @@
         :param user: Instance of User, user_id or username
         """
 
-        repo_group = RepoGroup.guess_instance(repo_group)
-        user = User.guess_instance(user)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
+        user = db.User.guess_instance(user)
 
-        obj = UserRepoGroupToPerm.query() \
-            .filter(UserRepoGroupToPerm.user == user) \
-            .filter(UserRepoGroupToPerm.group == repo_group) \
+        obj = db.UserRepoGroupToPerm.query() \
+            .filter(db.UserRepoGroupToPerm.user == user) \
+            .filter(db.UserRepoGroupToPerm.group == repo_group) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm on %s on %s', repo_group, user)
 
     def grant_user_group_permission(self, repo_group, group_name, perm):
@@ -496,20 +492,20 @@
             or user group name
         :param perm: Instance of Permission, or permission_name
         """
-        repo_group = RepoGroup.guess_instance(repo_group)
-        group_name = UserGroup.guess_instance(group_name)
-        permission = Permission.guess_instance(perm)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
+        group_name = db.UserGroup.guess_instance(group_name)
+        permission = db.Permission.guess_instance(perm)
 
         # check if we have that permission already
-        obj = UserGroupRepoGroupToPerm.query() \
-            .filter(UserGroupRepoGroupToPerm.group == repo_group) \
-            .filter(UserGroupRepoGroupToPerm.users_group == group_name) \
+        obj = db.UserGroupRepoGroupToPerm.query() \
+            .filter(db.UserGroupRepoGroupToPerm.group == repo_group) \
+            .filter(db.UserGroupRepoGroupToPerm.users_group == group_name) \
             .scalar()
 
         if obj is None:
             # create new
-            obj = UserGroupRepoGroupToPerm()
-            Session().add(obj)
+            obj = db.UserGroupRepoGroupToPerm()
+            meta.Session().add(obj)
 
         obj.group = repo_group
         obj.users_group = group_name
@@ -526,13 +522,13 @@
         :param group_name: Instance of UserGroup, users_group_id,
             or user group name
         """
-        repo_group = RepoGroup.guess_instance(repo_group)
-        group_name = UserGroup.guess_instance(group_name)
+        repo_group = db.RepoGroup.guess_instance(repo_group)
+        group_name = db.UserGroup.guess_instance(group_name)
 
-        obj = UserGroupRepoGroupToPerm.query() \
-            .filter(UserGroupRepoGroupToPerm.group == repo_group) \
-            .filter(UserGroupRepoGroupToPerm.users_group == group_name) \
+        obj = db.UserGroupRepoGroupToPerm.query() \
+            .filter(db.UserGroupRepoGroupToPerm.group == repo_group) \
+            .filter(db.UserGroupRepoGroupToPerm.users_group == group_name) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm to %s on %s', repo_group, group_name)
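
add_permission and delete_permission above walk recursive_groups_and_repos()
and dispatch per object: repo groups are handled by RepoGroupModel itself,
while repositories go through repo.RepoModel, with group.* permission names
rewritten to repository.* when granting. A small usage sketch under the
signatures visible above; committing the session afterwards is assumed to be
up to the caller:

    from kallithea.model import meta
    from kallithea.model.repo_group import RepoGroupModel

    # Give the 'reviewers' user group read access on the 'projects' repo group
    # and everything below it; recursive is 'all', 'repos', 'groups' or 'none'.
    RepoGroupModel().add_permission(
        repo_group='projects',   # RepoGroup instance, id or group name
        obj='reviewers',         # interpreted according to obj_type
        obj_type='user_group',   # or 'user'
        perm='group.read',
        recursive='all',
    )
    meta.Session().commit()
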
--- a/kallithea/model/scm.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/scm.py	Thu May 27 21:27:37 2021 +0200
@@ -30,24 +30,24 @@
 import posixpath
 import re
 import sys
+import tempfile
 import traceback
 
 import pkg_resources
 from tg.i18n import ugettext as _
 
 import kallithea
-from kallithea import BACKENDS
+from kallithea.lib import hooks
 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel
 from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError
-from kallithea.lib.hooks import process_pushed_raw_ids
-from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui
-from kallithea.lib.utils2 import safe_bytes, set_hook_environment
-from kallithea.lib.vcs import get_backend
+from kallithea.lib.utils import get_filesystem_repos, make_ui
+from kallithea.lib.utils2 import safe_bytes, safe_str, set_hook_environment, umask
+from kallithea.lib.vcs import get_repo
 from kallithea.lib.vcs.backends.base import EmptyChangeset
-from kallithea.lib.vcs.exceptions import RepositoryError
+from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
 from kallithea.lib.vcs.nodes import FileNode
 from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog
+from kallithea.model import db, meta, userlog
 
 
 log = logging.getLogger(__name__)
@@ -136,7 +136,7 @@
     """
 
     def __get_repo(self, instance):
-        cls = Repository
+        cls = db.Repository
         if isinstance(instance, cls):
             return instance
         elif isinstance(instance, int):
@@ -145,9 +145,8 @@
             if instance.isdigit():
                 return cls.get(int(instance))
             return cls.get_by_repo_name(instance)
-        elif instance is not None:
-            raise Exception('given object must be int, basestr or Instance'
-                            ' of %s got %s' % (type(cls), type(instance)))
+        raise Exception('given object must be int, str or instance'
+                        ' of %s, got %s' % (cls, type(instance)))
 
     @LazyProperty
     def repos_path(self):
@@ -155,7 +154,7 @@
         Gets the repositories root path from database
         """
 
-        q = Ui.query().filter(Ui.ui_key == '/').one()
+        q = db.Ui.query().filter(db.Ui.ui_key == '/').one()
 
         return q.ui_value
 
@@ -173,28 +172,20 @@
 
         log.info('scanning for repositories in %s', repos_path)
 
-        baseui = make_ui()
         repos = {}
 
         for name, path in get_filesystem_repos(repos_path):
             # name need to be decomposed and put back together using the /
             # since this is internal storage separator for kallithea
-            name = Repository.normalize_repo_name(name)
+            name = db.Repository.normalize_repo_name(name)
 
             try:
                 if name in repos:
                     raise RepositoryError('Duplicate repository name %s '
                                           'found in %s' % (name, path))
                 else:
-
-                    klass = get_backend(path[0])
-
-                    if path[0] == 'hg' and path[0] in BACKENDS:
-                        repos[name] = klass(path[1], baseui=baseui)
-
-                    if path[0] == 'git' and path[0] in BACKENDS:
-                        repos[name] = klass(path[1])
-            except OSError:
+                    repos[name] = get_repo(path[1], baseui=make_ui(path[1]))
+            except (OSError, VCSError):
                 continue
         log.debug('found %s paths with repositories', len(repos))
         return repos
@@ -208,8 +199,8 @@
         If no groups are specified, use top level groups.
         """
         if groups is None:
-            groups = RepoGroup.query() \
-                .filter(RepoGroup.parent_group_id == None).all()
+            groups = db.RepoGroup.query() \
+                .filter(db.RepoGroup.parent_group_id == None).all()
         return RepoGroupList(groups, perm_level='read')
 
     def mark_for_invalidation(self, repo_name):
@@ -219,21 +210,21 @@
         :param repo_name: the repo for which caches should be marked invalid
         """
         log.debug("Marking %s as invalidated and update cache", repo_name)
-        repo = Repository.get_by_repo_name(repo_name)
+        repo = db.Repository.get_by_repo_name(repo_name)
         if repo is not None:
             repo.set_invalidate()
             repo.update_changeset_cache()
 
     def toggle_following_repo(self, follow_repo_id, user_id):
 
-        f = UserFollowing.query() \
-            .filter(UserFollowing.follows_repository_id == follow_repo_id) \
-            .filter(UserFollowing.user_id == user_id).scalar()
+        f = db.UserFollowing.query() \
+            .filter(db.UserFollowing.follows_repository_id == follow_repo_id) \
+            .filter(db.UserFollowing.user_id == user_id).scalar()
 
         if f is not None:
             try:
-                Session().delete(f)
-                action_logger(UserTemp(user_id),
+                meta.Session().delete(f)
+                userlog.action_logger(UserTemp(user_id),
                               'stopped_following_repo',
                               RepoTemp(follow_repo_id))
                 return
@@ -242,12 +233,12 @@
                 raise
 
         try:
-            f = UserFollowing()
+            f = db.UserFollowing()
             f.user_id = user_id
             f.follows_repository_id = follow_repo_id
-            Session().add(f)
+            meta.Session().add(f)
 
-            action_logger(UserTemp(user_id),
+            userlog.action_logger(UserTemp(user_id),
                           'started_following_repo',
                           RepoTemp(follow_repo_id))
         except Exception:
@@ -255,62 +246,62 @@
             raise
 
     def toggle_following_user(self, follow_user_id, user_id):
-        f = UserFollowing.query() \
-            .filter(UserFollowing.follows_user_id == follow_user_id) \
-            .filter(UserFollowing.user_id == user_id).scalar()
+        f = db.UserFollowing.query() \
+            .filter(db.UserFollowing.follows_user_id == follow_user_id) \
+            .filter(db.UserFollowing.user_id == user_id).scalar()
 
         if f is not None:
             try:
-                Session().delete(f)
+                meta.Session().delete(f)
                 return
             except Exception:
                 log.error(traceback.format_exc())
                 raise
 
         try:
-            f = UserFollowing()
+            f = db.UserFollowing()
             f.user_id = user_id
             f.follows_user_id = follow_user_id
-            Session().add(f)
+            meta.Session().add(f)
         except Exception:
             log.error(traceback.format_exc())
             raise
 
     def is_following_repo(self, repo_name, user_id):
-        r = Repository.query() \
-            .filter(Repository.repo_name == repo_name).scalar()
+        r = db.Repository.query() \
+            .filter(db.Repository.repo_name == repo_name).scalar()
 
-        f = UserFollowing.query() \
-            .filter(UserFollowing.follows_repository == r) \
-            .filter(UserFollowing.user_id == user_id).scalar()
+        f = db.UserFollowing.query() \
+            .filter(db.UserFollowing.follows_repository == r) \
+            .filter(db.UserFollowing.user_id == user_id).scalar()
 
         return f is not None
 
     def is_following_user(self, username, user_id):
-        u = User.get_by_username(username)
+        u = db.User.get_by_username(username)
 
-        f = UserFollowing.query() \
-            .filter(UserFollowing.follows_user == u) \
-            .filter(UserFollowing.user_id == user_id).scalar()
+        f = db.UserFollowing.query() \
+            .filter(db.UserFollowing.follows_user == u) \
+            .filter(db.UserFollowing.user_id == user_id).scalar()
 
         return f is not None
 
     def get_followers(self, repo):
-        repo = Repository.guess_instance(repo)
+        repo = db.Repository.guess_instance(repo)
 
-        return UserFollowing.query() \
-                .filter(UserFollowing.follows_repository == repo).count()
+        return db.UserFollowing.query() \
+                .filter(db.UserFollowing.follows_repository == repo).count()
 
     def get_forks(self, repo):
-        repo = Repository.guess_instance(repo)
-        return Repository.query() \
-                .filter(Repository.fork == repo).count()
+        repo = db.Repository.guess_instance(repo)
+        return db.Repository.query() \
+                .filter(db.Repository.fork == repo).count()
 
     def get_pull_requests(self, repo):
-        repo = Repository.guess_instance(repo)
-        return PullRequest.query() \
-                .filter(PullRequest.other_repo == repo) \
-                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
+        repo = db.Repository.guess_instance(repo)
+        return db.PullRequest.query() \
+                .filter(db.PullRequest.other_repo == repo) \
+                .filter(db.PullRequest.status != db.PullRequest.STATUS_CLOSED).count()
 
     def mark_as_fork(self, repo, fork, user):
         repo = self.__get_repo(repo)
@@ -336,24 +327,7 @@
         :param revisions: list of revisions that we pushed
         """
         set_hook_environment(username, ip_addr, repo_name, repo_alias=repo.alias, action=action)
-        process_pushed_raw_ids(revisions) # also calls mark_for_invalidation
-
-    def _get_IMC_module(self, scm_type):
-        """
-        Returns InMemoryCommit class based on scm_type
-
-        :param scm_type:
-        """
-        if scm_type == 'hg':
-            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
-            return MercurialInMemoryChangeset
-
-        if scm_type == 'git':
-            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
-            return GitInMemoryChangeset
-
-        raise Exception('Invalid scm_type, must be one of hg,git got %s'
-                        % (scm_type,))
+        hooks.process_pushed_raw_ids(revisions) # also calls mark_for_invalidation
 
     def pull_changes(self, repo, username, ip_addr, clone_uri=None):
         """
@@ -394,9 +368,8 @@
 
         :param repo: a db_repo.scm_instance
         """
-        user = User.guess_instance(user)
-        IMC = self._get_IMC_module(repo.alias)
-        imc = IMC(repo)
+        user = db.User.guess_instance(user)
+        imc = repo.in_memory_changeset
         imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path)))
         try:
             tip = imc.commit(message=message, author=author,
@@ -466,7 +439,7 @@
         :returns: new committed changeset
         """
 
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
         scm_instance = repo.scm_instance_no_cache()
 
         processed_nodes = []
@@ -477,14 +450,10 @@
                 content = content.read()
             processed_nodes.append((f_path, content))
 
-        message = message
         committer = user.full_contact
         if not author:
             author = committer
 
-        IMC = self._get_IMC_module(scm_instance.alias)
-        imc = IMC(scm_instance)
-
         if not parent_cs:
             parent_cs = EmptyChangeset(alias=scm_instance.alias)
 
@@ -494,6 +463,7 @@
         else:
             parents = [parent_cs]
         # add multiple nodes
+        imc = scm_instance.in_memory_changeset
         for path, content in processed_nodes:
             imc.add(FileNode(path, content=content))
 
@@ -518,17 +488,13 @@
         """
         Commits specified nodes to repo. Again.
         """
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
         scm_instance = repo.scm_instance_no_cache()
 
-        message = message
         committer = user.full_contact
         if not author:
             author = committer
 
-        imc_class = self._get_IMC_module(scm_instance.alias)
-        imc = imc_class(scm_instance)
-
         if not parent_cs:
             parent_cs = EmptyChangeset(alias=scm_instance.alias)
 
@@ -539,6 +505,7 @@
             parents = [parent_cs]
 
         # add multiple nodes
+        imc = scm_instance.in_memory_changeset
         for _filename, data in nodes.items():
             # new filename, can be renamed from the old one
             filename = self._sanitize_path(data['filename'])
@@ -591,7 +558,7 @@
         :returns: new committed changeset after deletion
         """
 
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
         scm_instance = repo.scm_instance_no_cache()
 
         processed_nodes = []
@@ -602,14 +569,10 @@
             content = nodes[f_path].get('content')
             processed_nodes.append((f_path, content))
 
-        message = message
         committer = user.full_contact
         if not author:
             author = committer
 
-        IMC = self._get_IMC_module(scm_instance.alias)
-        imc = IMC(scm_instance)
-
         if not parent_cs:
             parent_cs = EmptyChangeset(alias=scm_instance.alias)
 
@@ -619,6 +582,7 @@
         else:
             parents = [parent_cs]
         # add multiple nodes
+        imc = scm_instance.in_memory_changeset
         for path, content in processed_nodes:
             imc.remove(FileNode(path, content=content))
 
@@ -639,7 +603,7 @@
         return tip
 
     def get_unread_journal(self):
-        return UserLog.query().count()
+        return db.UserLog.query().count()
 
     def get_repo_landing_revs(self, repo=None):
         """
@@ -651,12 +615,12 @@
 
         hist_l = []
         choices = []
-        repo = self.__get_repo(repo)
         hist_l.append(('rev:tip', _('latest tip')))
         choices.append('rev:tip')
         if repo is None:
             return choices, hist_l
 
+        repo = self.__get_repo(repo)
         repo = repo.scm_instance
 
         branches_group = ([('branch:%s' % k, k) for k, v in
@@ -691,77 +655,81 @@
                 or sys.executable
                 or '/usr/bin/env python3')
 
-    def install_git_hooks(self, repo, force_create=False):
+    def install_git_hooks(self, repo, force=False):
         """
         Creates a kallithea hook inside a git repository
 
         :param repo: Instance of VCS repo
-        :param force_create: Create even if same name hook exists
+        :param force: Overwrite existing non-Kallithea hooks
         """
 
-        loc = os.path.join(repo.path, 'hooks')
+        hooks_path = os.path.join(repo.path, 'hooks')
         if not repo.bare:
-            loc = os.path.join(repo.path, '.git', 'hooks')
-        if not os.path.isdir(loc):
-            os.makedirs(loc)
+            hooks_path = os.path.join(repo.path, '.git', 'hooks')
+        if not os.path.isdir(hooks_path):
+            os.makedirs(hooks_path)
 
         tmpl_post = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
         tmpl_post += pkg_resources.resource_string(
-            'kallithea', os.path.join('config', 'post_receive_tmpl.py')
-        )
-        tmpl_pre = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
-        tmpl_pre += pkg_resources.resource_string(
-            'kallithea', os.path.join('config', 'pre_receive_tmpl.py')
+            'kallithea', os.path.join('templates', 'py', 'git_post_receive_hook.py')
         )
 
-        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
-            _hook_file = os.path.join(loc, '%s-receive' % h_type)
-            has_hook = False
-            log.debug('Installing git hook in repo %s', repo)
-            if os.path.exists(_hook_file):
-                # let's take a look at this hook, maybe it's kallithea ?
-                log.debug('hook exists, checking if it is from kallithea')
-                with open(_hook_file, 'rb') as f:
+        for h_type, tmpl in [('pre-receive', None), ('post-receive', tmpl_post)]:
+            hook_file = os.path.join(hooks_path, h_type)
+            other_hook = False
+            log.debug('Installing git hook %s in repo %s', h_type, repo.path)
+            if os.path.islink(hook_file):
+                log.debug("Found symlink hook at %s", hook_file)
+                other_hook = True
+            elif os.path.isfile(hook_file):
+                log.debug('hook file %s exists, checking if it is from kallithea', hook_file)
+                with open(hook_file, 'rb') as f:
                     data = f.read()
                     matches = re.search(br'^KALLITHEA_HOOK_VER\s*=\s*(.*)$', data, flags=re.MULTILINE)
                     if matches:
-                        try:
-                            ver = matches.groups()[0]
-                            log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver)
-                            has_hook = True
-                        except Exception:
-                            log.error(traceback.format_exc())
-            else:
-                # there is no hook in this dir, so we want to create one
-                has_hook = True
-
-            if has_hook or force_create:
-                log.debug('writing %s hook file !', h_type)
+                        ver = safe_str(matches.group(1))
+                        log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %s', ver)
+                    else:
+                        log.debug('Found non-Kallithea hook at %s', hook_file)
+                        other_hook = True
+            elif os.path.exists(hook_file):
+                log.debug("Found hook that isn't a regular file at %s", hook_file)
+                other_hook = True
+            if other_hook and not force:
+                log.warning('skipping overwriting hook file %s', hook_file)
+            elif h_type == 'post-receive':
+                log.debug('writing hook file %s', hook_file)
+                if other_hook:
+                    backup_file = hook_file + '.bak'
+                    log.warning('moving existing hook to %s', backup_file)
+                    os.rename(hook_file, backup_file)
                 try:
-                    with open(_hook_file, 'wb') as f:
-                        tmpl = tmpl.replace(b'_TMPL_', safe_bytes(kallithea.__version__))
-                        f.write(tmpl)
-                    os.chmod(_hook_file, 0o755)
-                except IOError as e:
-                    log.error('error writing %s: %s', _hook_file, e)
-            else:
-                log.debug('skipping writing hook file')
+                    fh, fn = tempfile.mkstemp(prefix=hook_file + '.tmp.')
+                    os.write(fh, tmpl.replace(b'_TMPL_', safe_bytes(kallithea.__version__)))
+                    os.close(fh)
+                    os.chmod(fn, 0o777 & ~umask)
+                    os.rename(fn, hook_file)
+                except (OSError, IOError) as e:
+                    log.error('error writing hook %s: %s', hook_file, e)
+            elif h_type == 'pre-receive':  # no longer used, so just remove any existing Kallithea hook
+                if os.path.lexists(hook_file) and not other_hook:
+                    os.remove(hook_file)
 
 
-def AvailableRepoGroupChoices(top_perms, repo_group_perm_level, extras=()):
+def AvailableRepoGroupChoices(repo_group_perm_level, extras=()):
     """Return group_id,string tuples with choices for all the repo groups where
     the user has the necessary permissions.
 
     Top level is -1.
     """
-    groups = RepoGroup.query().all()
+    groups = db.RepoGroup.query().all()
     if HasPermissionAny('hg.admin')('available repo groups'):
         groups.append(None)
     else:
         groups = list(RepoGroupList(groups, perm_level=repo_group_perm_level))
-        if top_perms and HasPermissionAny(*top_perms)('available repo groups'):
+        if HasPermissionAny('hg.create.repository')('available repo groups'):
             groups.append(None)
         for extra in extras:
             if not any(rg == extra for rg in groups):
                 groups.append(extra)
-    return RepoGroup.groups_choices(groups=groups)
+    return db.RepoGroup.groups_choices(groups=groups)
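
The rewritten install_git_hooks above installs only the post-receive hook and does so atomically: the script is written to a temporary file inside the hooks directory, made executable subject to the process umask, and renamed over any previous hook. Below is a minimal, self-contained sketch of that write pattern only; the target path is hypothetical and the umask is re-derived here, whereas Kallithea takes it from kallithea.lib.utils2.

    import os
    import tempfile

    # Capture the process umask; os.umask() sets and returns it, so restore it at once.
    umask = os.umask(0)
    os.umask(umask)

    def write_executable_atomically(target, content):
        # mkstemp with an absolute prefix places the temporary file next to the
        # target, so the final rename stays on one filesystem and is atomic.
        fd, tmp_name = tempfile.mkstemp(prefix=target + '.tmp.')
        try:
            os.write(fd, content)
        finally:
            os.close(fd)
        os.chmod(tmp_name, 0o777 & ~umask)  # executable, limited by the umask
        os.rename(tmp_name, target)

    # Hypothetical path, for illustration only.
    write_executable_atomically('/tmp/post-receive', b"#!/usr/bin/env python3\nprint('hook ran')\n")
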
--- a/kallithea/model/ssh_key.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/ssh_key.py	Thu May 27 21:27:37 2021 +0200
@@ -29,10 +29,9 @@
 from tg.i18n import ugettext as _
 
 from kallithea.lib import ssh
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib.utils2 import asbool
 from kallithea.lib.vcs.exceptions import RepositoryError
-from kallithea.model.db import User, UserSshKeys
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 
 
 log = logging.getLogger(__name__)
@@ -58,18 +57,18 @@
         if not description.strip():
             description = comment.strip()
 
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
-        new_ssh_key = UserSshKeys()
+        new_ssh_key = db.UserSshKeys()
         new_ssh_key.user_id = user.user_id
         new_ssh_key.description = description
         new_ssh_key.public_key = public_key
 
-        for ssh_key in UserSshKeys.query().filter(UserSshKeys.fingerprint == new_ssh_key.fingerprint).all():
+        for ssh_key in db.UserSshKeys.query().filter(db.UserSshKeys.fingerprint == new_ssh_key.fingerprint).all():
             raise SshKeyModelException(_('SSH key %s is already used by %s') %
                                        (new_ssh_key.fingerprint, ssh_key.user.username))
 
-        Session().add(new_ssh_key)
+        meta.Session().add(new_ssh_key)
 
         return new_ssh_key
 
@@ -78,24 +77,24 @@
         Deletes ssh key with given fingerprint for the given user.
         Will raise SshKeyModelException on errors
         """
-        ssh_key = UserSshKeys.query().filter(UserSshKeys.fingerprint == fingerprint)
+        ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.fingerprint == fingerprint)
 
-        user = User.guess_instance(user)
-        ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id)
+        user = db.User.guess_instance(user)
+        ssh_key = ssh_key.filter(db.UserSshKeys.user_id == user.user_id)
 
         ssh_key = ssh_key.scalar()
         if ssh_key is None:
             raise SshKeyModelException(_('SSH key with fingerprint %r not found') % fingerprint)
-        Session().delete(ssh_key)
+        meta.Session().delete(ssh_key)
 
     def get_ssh_keys(self, user):
-        user = User.guess_instance(user)
-        user_ssh_keys = UserSshKeys.query() \
-            .filter(UserSshKeys.user_id == user.user_id).all()
+        user = db.User.guess_instance(user)
+        user_ssh_keys = db.UserSshKeys.query() \
+            .filter(db.UserSshKeys.user_id == user.user_id).all()
         return user_ssh_keys
 
     def write_authorized_keys(self):
-        if not str2bool(config.get('ssh_enabled', False)):
+        if not asbool(config.get('ssh_enabled', False)):
             log.error("Will not write SSH authorized_keys file - ssh_enabled is not configured")
             return
         authorized_keys = config.get('ssh_authorized_keys')
@@ -131,7 +130,7 @@
         fh, tmp_authorized_keys = tempfile.mkstemp('.authorized_keys', dir=os.path.dirname(authorized_keys))
         with os.fdopen(fh, 'w') as f:
             f.write("# WARNING: This .ssh/authorized_keys file is managed by Kallithea. Manual editing or adding new entries will make Kallithea back off.\n")
-            for key in UserSshKeys.query().join(UserSshKeys.user).filter(User.active == True):
+            for key in db.UserSshKeys.query().join(db.UserSshKeys.user).filter(db.User.active == True):
                 f.write(ssh.authorized_keys_line(kallithea_cli_path, config['__file__'], key))
         os.chmod(tmp_authorized_keys, stat.S_IRUSR | stat.S_IWUSR)
         # Note: simple overwrite / rename isn't enough to replace the file on Windows
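
write_authorized_keys now gates on asbool(config.get('ssh_enabled', False)) instead of str2bool. The real helper lives in kallithea.lib.utils2; the sketch below only illustrates the string-to-boolean semantics such a helper is assumed to have, and the exact set of accepted spellings is an assumption rather than something taken from the Kallithea source.

    def asbool_sketch(value):
        """Illustrative stand-in for an asbool-style helper (assumed semantics)."""
        if isinstance(value, str):
            value = value.strip().lower()
            if value in ('true', 'yes', 'on', 'y', 't', '1'):
                return True
            if value in ('false', 'no', 'off', 'n', 'f', '0'):
                return False
            raise ValueError('not a boolean string: %r' % value)
        return bool(value)

    assert asbool_sketch('True') is True
    assert asbool_sketch('0') is False
    assert asbool_sketch(False) is False
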
--- a/kallithea/model/user.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/user.py	Thu May 27 21:27:37 2021 +0200
@@ -36,10 +36,10 @@
 from tg import config
 from tg.i18n import ugettext as _
 
+from kallithea.lib import hooks, webutils
 from kallithea.lib.exceptions import DefaultUserException, UserOwnsReposException
-from kallithea.lib.utils2 import generate_api_key, get_current_authuser
-from kallithea.model.db import Permission, User, UserEmailMap, UserIpMap, UserToPerm
-from kallithea.model.meta import Session
+from kallithea.lib.utils2 import check_password, generate_api_key, get_crypt_password, get_current_authuser
+from kallithea.model import db, forms, meta, notification
 
 
 log = logging.getLogger(__name__)
@@ -49,18 +49,16 @@
     password_reset_token_lifetime = 86400 # 24 hours
 
     def get(self, user_id):
-        user = User.query()
+        user = db.User.query()
         return user.get(user_id)
 
     def get_user(self, user):
-        return User.guess_instance(user)
+        return db.User.guess_instance(user)
 
     def create(self, form_data, cur_user=None):
         if not cur_user:
             cur_user = getattr(get_current_authuser(), 'username', None)
 
-        from kallithea.lib.hooks import log_create_user, \
-            check_allowed_create_user
         _fd = form_data
         user_data = {
             'username': _fd['username'],
@@ -72,10 +70,9 @@
             'admin': False
         }
         # raises UserCreationError if it's not allowed
-        check_allowed_create_user(user_data, cur_user)
-        from kallithea.lib.auth import get_crypt_password
+        hooks.check_allowed_create_user(user_data, cur_user)
 
-        new_user = User()
+        new_user = db.User()
         for k, v in form_data.items():
             if k == 'password':
                 v = get_crypt_password(v)
@@ -84,10 +81,10 @@
             setattr(new_user, k, v)
 
         new_user.api_key = generate_api_key()
-        Session().add(new_user)
-        Session().flush() # make database assign new_user.user_id
+        meta.Session().add(new_user)
+        meta.Session().flush() # make database assign new_user.user_id
 
-        log_create_user(new_user.get_dict(), cur_user)
+        hooks.log_create_user(new_user.get_dict(), cur_user)
         return new_user
 
     def create_or_update(self, username, password, email, firstname='',
@@ -111,22 +108,19 @@
         if not cur_user:
             cur_user = getattr(get_current_authuser(), 'username', None)
 
-        from kallithea.lib.auth import get_crypt_password, check_password
-        from kallithea.lib.hooks import log_create_user, \
-            check_allowed_create_user
         user_data = {
             'username': username, 'password': password,
             'email': email, 'firstname': firstname, 'lastname': lastname,
             'active': active, 'admin': admin
         }
         # raises UserCreationError if it's not allowed
-        check_allowed_create_user(user_data, cur_user)
+        hooks.check_allowed_create_user(user_data, cur_user)
 
         log.debug('Checking for %s account in Kallithea database', username)
-        user = User.get_by_username(username, case_insensitive=True)
+        user = db.User.get_by_username(username, case_insensitive=True)
         if user is None:
             log.debug('creating new user %s', username)
-            new_user = User()
+            new_user = db.User()
             edit = False
         else:
             log.debug('updating user %s', username)
@@ -156,11 +150,11 @@
                     if password else ''
 
             if user is None:
-                Session().add(new_user)
-                Session().flush() # make database assign new_user.user_id
+                meta.Session().add(new_user)
+                meta.Session().flush() # make database assign new_user.user_id
 
             if not edit:
-                log_create_user(new_user.get_dict(), cur_user)
+                hooks.log_create_user(new_user.get_dict(), cur_user)
 
             return new_user
         except (DatabaseError,):
@@ -168,36 +162,24 @@
             raise
 
     def create_registration(self, form_data):
-        from kallithea.model.notification import NotificationModel
-        import kallithea.lib.helpers as h
-
         form_data['admin'] = False
-        form_data['extern_type'] = User.DEFAULT_AUTH_TYPE
+        form_data['extern_type'] = db.User.DEFAULT_AUTH_TYPE
         form_data['extern_name'] = ''
         new_user = self.create(form_data)
 
         # notification to admins
-        subject = _('New user registration')
-        body = (
-            'New user registration\n'
-            '---------------------\n'
-            '- Username: {user.username}\n'
-            '- Full Name: {user.full_name}\n'
-            '- Email: {user.email}\n'
-            ).format(user=new_user)
-        edit_url = h.canonical_url('edit_user', id=new_user.user_id)
+        edit_url = webutils.canonical_url('edit_user', id=new_user.user_id)
         email_kwargs = {
             'registered_user_url': edit_url,
             'new_username': new_user.username,
             'new_email': new_user.email,
             'new_full_name': new_user.full_name}
-        NotificationModel().create(created_by=new_user, subject=subject,
-                                   body=body, recipients=None,
-                                   type_=NotificationModel.TYPE_REGISTRATION,
+        notification.NotificationModel().create(created_by=new_user,
+                                   body=None, recipients=None,
+                                   type_=notification.NotificationModel.TYPE_REGISTRATION,
                                    email_kwargs=email_kwargs)
 
     def update(self, user_id, form_data, skip_attrs=None):
-        from kallithea.lib.auth import get_crypt_password
         skip_attrs = skip_attrs or []
         user = self.get(user_id)
         if user.is_default_user:
@@ -218,9 +200,7 @@
                 setattr(user, k, v)
 
     def update_user(self, user, **kwargs):
-        from kallithea.lib.auth import get_crypt_password
-
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
         if user.is_default_user:
             raise DefaultUserException(
                 _("You can't edit this user since it's"
@@ -237,7 +217,7 @@
     def delete(self, user, cur_user=None):
         if cur_user is None:
             cur_user = getattr(get_current_authuser(), 'username', None)
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
         if user.is_default_user:
             raise DefaultUserException(
@@ -261,10 +241,9 @@
                 _('User "%s" still owns %s user groups and cannot be '
                   'removed. Switch owners or remove those user groups: %s')
                 % (user.username, len(usergroups), ', '.join(usergroups)))
-        Session().delete(user)
+        meta.Session().delete(user)
 
-        from kallithea.lib.hooks import log_delete_user
-        log_delete_user(user.get_dict(), cur_user)
+        hooks.log_delete_user(user.get_dict(), cur_user)
 
     def can_change_password(self, user):
         from kallithea.lib import auth_modules
@@ -303,8 +282,8 @@
         guaranteed not to occur in any of the values.
         """
         app_secret = config.get('app_instance_uuid')
-        return hmac.HMAC(
-            key='\0'.join([app_secret, user.password]).encode('utf-8'),
+        return hmac.new(
+            '\0'.join([app_secret, user.password]).encode('utf-8'),
             msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'),
             digestmod=hashlib.sha1,
         ).hexdigest()
@@ -317,53 +296,48 @@
         allowing users to copy-paste or manually enter the token from the
         email.
         """
-        from kallithea.lib.celerylib import tasks
-        from kallithea.model.notification import EmailNotificationModel
-        import kallithea.lib.helpers as h
-
         user_email = data['email']
-        user = User.get_by_email(user_email)
+        user = db.User.get_by_email(user_email)
         timestamp = int(time.time())
         if user is not None:
             if self.can_change_password(user):
                 log.debug('password reset user %s found', user)
                 token = self.get_reset_password_token(user,
                                                       timestamp,
-                                                      h.session_csrf_secret_token())
+                                                      webutils.session_csrf_secret_token())
                 # URL must be fully qualified; but since the token is locked to
                 # the current browser session, we must provide a URL with the
                 # current scheme and hostname, rather than the canonical_url.
-                link = h.url('reset_password_confirmation', qualified=True,
+                link = webutils.url('reset_password_confirmation', qualified=True,
                              email=user_email,
                              timestamp=timestamp,
                              token=token)
             else:
                 log.debug('password reset user %s found but was managed', user)
                 token = link = None
-            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
-            body = EmailNotificationModel().get_email_tmpl(
+            reg_type = notification.EmailNotificationModel.TYPE_PASSWORD_RESET
+            body = notification.EmailNotificationModel().get_email_tmpl(
                 reg_type, 'txt',
                 user=user.short_contact,
                 reset_token=token,
                 reset_url=link)
-            html_body = EmailNotificationModel().get_email_tmpl(
+            html_body = notification.EmailNotificationModel().get_email_tmpl(
                 reg_type, 'html',
                 user=user.short_contact,
                 reset_token=token,
                 reset_url=link)
             log.debug('sending email')
-            tasks.send_email([user_email], _("Password reset link"), body, html_body)
+            notification.send_email([user_email], _("Password reset link"), body, html_body)
             log.info('send new password mail to %s', user_email)
         else:
             log.debug("password reset email %s not found", user_email)
 
-        return h.url('reset_password_confirmation',
+        return webutils.url('reset_password_confirmation',
                      email=user_email,
                      timestamp=timestamp)
 
     def verify_reset_password_token(self, email, timestamp, token):
-        import kallithea.lib.helpers as h
-        user = User.get_by_email(email)
+        user = db.User.get_by_email(email)
         if user is None:
             log.debug("user with email %s not found", email)
             return False
@@ -380,25 +354,23 @@
 
         expected_token = self.get_reset_password_token(user,
                                                        timestamp,
-                                                       h.session_csrf_secret_token())
+                                                       webutils.session_csrf_secret_token())
         log.debug('computed password reset token: %s', expected_token)
         log.debug('received password reset token: %s', token)
         return expected_token == token
 
     def reset_password(self, user_email, new_passwd):
-        from kallithea.lib.celerylib import tasks
-        from kallithea.lib import auth
-        user = User.get_by_email(user_email)
+        user = db.User.get_by_email(user_email)
         if user is not None:
             if not self.can_change_password(user):
                 raise Exception('trying to change password for external user')
-            user.password = auth.get_crypt_password(new_passwd)
-            Session().commit()
+            user.password = get_crypt_password(new_passwd)
+            meta.Session().commit()
             log.info('change password for %s', user_email)
         if new_passwd is None:
             raise Exception('unable to set new password')
 
-        tasks.send_email([user_email],
+        notification.send_email([user_email],
                  _('Password reset notification'),
                  _('The password to your account %s has been changed using password reset form.') % (user.username,))
         log.info('send password reset mail to %s', user_email)
@@ -406,11 +378,11 @@
         return True
 
     def has_perm(self, user, perm):
-        perm = Permission.guess_instance(perm)
-        user = User.guess_instance(user)
+        perm = db.Permission.guess_instance(perm)
+        user = db.User.guess_instance(user)
 
-        return UserToPerm.query().filter(UserToPerm.user == user) \
-            .filter(UserToPerm.permission == perm).scalar() is not None
+        return db.UserToPerm.query().filter(db.UserToPerm.user == user) \
+            .filter(db.UserToPerm.permission == perm).scalar() is not None
 
     def grant_perm(self, user, perm):
         """
@@ -419,19 +391,19 @@
         :param user:
         :param perm:
         """
-        user = User.guess_instance(user)
-        perm = Permission.guess_instance(perm)
+        user = db.User.guess_instance(user)
+        perm = db.Permission.guess_instance(perm)
         # if this permission is already granted skip it
-        _perm = UserToPerm.query() \
-            .filter(UserToPerm.user == user) \
-            .filter(UserToPerm.permission == perm) \
+        _perm = db.UserToPerm.query() \
+            .filter(db.UserToPerm.user == user) \
+            .filter(db.UserToPerm.permission == perm) \
             .scalar()
         if _perm:
             return
-        new = UserToPerm()
+        new = db.UserToPerm()
         new.user = user
         new.permission = perm
-        Session().add(new)
+        meta.Session().add(new)
         return new
 
     def revoke_perm(self, user, perm):
@@ -441,12 +413,12 @@
         :param user:
         :param perm:
         """
-        user = User.guess_instance(user)
-        perm = Permission.guess_instance(perm)
+        user = db.User.guess_instance(user)
+        perm = db.Permission.guess_instance(perm)
 
-        UserToPerm.query().filter(
-            UserToPerm.user == user,
-            UserToPerm.permission == perm,
+        db.UserToPerm.query().filter(
+            db.UserToPerm.user == user,
+            db.UserToPerm.permission == perm,
         ).delete()
 
     def add_extra_email(self, user, email):
@@ -456,15 +428,14 @@
         :param user:
         :param email:
         """
-        from kallithea.model import forms
         form = forms.UserExtraEmailForm()()
         data = form.to_python(dict(email=email))
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
-        obj = UserEmailMap()
+        obj = db.UserEmailMap()
         obj.user = user
         obj.email = data['email']
-        Session().add(obj)
+        meta.Session().add(obj)
         return obj
 
     def delete_extra_email(self, user, email_id):
@@ -474,10 +445,10 @@
         :param user:
         :param email_id:
         """
-        user = User.guess_instance(user)
-        obj = UserEmailMap.query().get(email_id)
+        user = db.User.guess_instance(user)
+        obj = db.UserEmailMap.query().get(email_id)
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
 
     def add_extra_ip(self, user, ip):
         """
@@ -486,15 +457,14 @@
         :param user:
         :param ip:
         """
-        from kallithea.model import forms
         form = forms.UserExtraIpForm()()
         data = form.to_python(dict(ip=ip))
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
 
-        obj = UserIpMap()
+        obj = db.UserIpMap()
         obj.user = user
         obj.ip_addr = data['ip']
-        Session().add(obj)
+        meta.Session().add(obj)
         return obj
 
     def delete_extra_ip(self, user, ip_id):
@@ -504,7 +474,7 @@
         :param user:
         :param ip_id:
         """
-        user = User.guess_instance(user)
-        obj = UserIpMap.query().get(ip_id)
+        user = db.User.guess_instance(user)
+        obj = db.UserIpMap.query().get(ip_id)
         if obj:
-            Session().delete(obj)
+            meta.Session().delete(obj)
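
The hmac change above replaces direct construction of hmac.HMAC with the standard hmac.new() constructor; the key, message and digest are unchanged. Below is a self-contained sketch of the same token construction with made-up values; the function name and its arguments are illustrative, not Kallithea's API.

    import hashlib
    import hmac

    def reset_token(app_secret, password_hash, session_id, user_id, email, timestamp):
        # NUL-join the parts; NUL cannot occur in any of the values, so the
        # concatenation is unambiguous.
        key = '\0'.join([app_secret, password_hash]).encode('utf-8')
        msg = '\0'.join([session_id, str(user_id), email, str(timestamp)]).encode('utf-8')
        return hmac.new(key, msg, digestmod=hashlib.sha1).hexdigest()

    print(reset_token('app-uuid', 'stored-password-hash', 'session-id', 42,
                      'user@example.com', 1622140057))
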
--- a/kallithea/model/user_group.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/user_group.py	Thu May 27 21:27:37 2021 +0200
@@ -28,8 +28,7 @@
 import traceback
 
 from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
-from kallithea.model.db import (Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm,
-                                UserUserGroupToPerm)
+from kallithea.model import db, meta
 
 
 log = logging.getLogger(__name__)
@@ -40,18 +39,18 @@
     def _create_default_perms(self, user_group):
         # create default permission
         default_perm = 'usergroup.read'
-        def_user = User.get_default_user()
+        def_user = db.User.get_default_user()
         for p in def_user.user_perms:
             if p.permission.permission_name.startswith('usergroup.'):
                 default_perm = p.permission.permission_name
                 break
 
-        user_group_to_perm = UserUserGroupToPerm()
-        user_group_to_perm.permission = Permission.get_by_key(default_perm)
+        user_group_to_perm = db.UserUserGroupToPerm()
+        user_group_to_perm.permission = db.Permission.get_by_key(default_perm)
 
         user_group_to_perm.user_group = user_group
         user_group_to_perm.user_id = def_user.user_id
-        Session().add(user_group_to_perm)
+        meta.Session().add(user_group_to_perm)
         return user_group_to_perm
 
     def _update_permissions(self, user_group, perms_new=None,
@@ -89,24 +88,24 @@
                     )
 
     def get(self, user_group_id):
-        return UserGroup.get(user_group_id)
+        return db.UserGroup.get(user_group_id)
 
     def get_group(self, user_group):
-        return UserGroup.guess_instance(user_group)
+        return db.UserGroup.guess_instance(user_group)
 
     def get_by_name(self, name, case_insensitive=False):
-        return UserGroup.get_by_group_name(name, case_insensitive=case_insensitive)
+        return db.UserGroup.get_by_group_name(name, case_insensitive=case_insensitive)
 
     def create(self, name, description, owner, active=True, group_data=None):
         try:
-            new_user_group = UserGroup()
-            new_user_group.owner = User.guess_instance(owner)
+            new_user_group = db.UserGroup()
+            new_user_group.owner = db.User.guess_instance(owner)
             new_user_group.users_group_name = name
             new_user_group.user_group_description = description
             new_user_group.users_group_active = active
             if group_data:
                 new_user_group.group_data = group_data
-            Session().add(new_user_group)
+            meta.Session().add(new_user_group)
             self._create_default_perms(new_user_group)
 
             self.grant_user_permission(user_group=new_user_group,
@@ -120,7 +119,7 @@
     def update(self, user_group, form_data):
 
         try:
-            user_group = UserGroup.guess_instance(user_group)
+            user_group = db.UserGroup.guess_instance(user_group)
 
             for k, v in form_data.items():
                 if k == 'users_group_members':
@@ -128,15 +127,15 @@
                     if v:
                         v = [v] if isinstance(v, str) else v
                         for u_id in set(v):
-                            member = UserGroupMember(user_group.users_group_id, u_id)
+                            member = db.UserGroupMember(user_group.users_group_id, u_id)
                             members_list.append(member)
-                            Session().add(member)
+                            meta.Session().add(member)
                     user_group.members = members_list
                 setattr(user_group, k, v)
 
             # Flush to make db assign users_group_member_id to newly
             # created UserGroupMembers.
-            Session().flush()
+            meta.Session().flush()
         except Exception:
             log.error(traceback.format_exc())
             raise
@@ -150,25 +149,25 @@
         :param user_group:
         :param force:
         """
-        user_group = UserGroup.guess_instance(user_group)
+        user_group = db.UserGroup.guess_instance(user_group)
         try:
             # check if this group is not assigned to repo
-            assigned_groups = UserGroupRepoToPerm.query() \
-                .filter(UserGroupRepoToPerm.users_group == user_group).all()
+            assigned_groups = db.UserGroupRepoToPerm.query() \
+                .filter(db.UserGroupRepoToPerm.users_group == user_group).all()
             assigned_groups = [x.repository.repo_name for x in assigned_groups]
 
             if assigned_groups and not force:
                 raise UserGroupsAssignedException(
                     'User Group assigned to %s' % ", ".join(assigned_groups))
-            Session().delete(user_group)
+            meta.Session().delete(user_group)
         except Exception:
             log.error(traceback.format_exc())
             raise
 
     def add_user_to_group(self, user_group, user):
         """Return True if user already is in the group - else return the new UserGroupMember"""
-        user_group = UserGroup.guess_instance(user_group)
-        user = User.guess_instance(user)
+        user_group = db.UserGroup.guess_instance(user_group)
+        user = db.User.guess_instance(user)
 
         for m in user_group.members:
             u = m.user
@@ -177,22 +176,22 @@
                 return True
 
         try:
-            user_group_member = UserGroupMember()
+            user_group_member = db.UserGroupMember()
             user_group_member.user = user
             user_group_member.users_group = user_group
 
             user_group.members.append(user_group_member)
             user.group_member.append(user_group_member)
 
-            Session().add(user_group_member)
+            meta.Session().add(user_group_member)
             return user_group_member
         except Exception:
             log.error(traceback.format_exc())
             raise
 
     def remove_user_from_group(self, user_group, user):
-        user_group = UserGroup.guess_instance(user_group)
-        user = User.guess_instance(user)
+        user_group = db.UserGroup.guess_instance(user_group)
+        user = db.User.guess_instance(user)
 
         user_group_member = None
         for m in user_group.members:
@@ -203,7 +202,7 @@
 
         if user_group_member:
             try:
-                Session().delete(user_group_member)
+                meta.Session().delete(user_group_member)
                 return True
             except Exception:
                 log.error(traceback.format_exc())
@@ -213,40 +212,40 @@
             return False
 
     def has_perm(self, user_group, perm):
-        user_group = UserGroup.guess_instance(user_group)
-        perm = Permission.guess_instance(perm)
+        user_group = db.UserGroup.guess_instance(user_group)
+        perm = db.Permission.guess_instance(perm)
 
-        return UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == user_group) \
-            .filter(UserGroupToPerm.permission == perm).scalar() is not None
+        return db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == user_group) \
+            .filter(db.UserGroupToPerm.permission == perm).scalar() is not None
 
     def grant_perm(self, user_group, perm):
-        user_group = UserGroup.guess_instance(user_group)
-        perm = Permission.guess_instance(perm)
+        user_group = db.UserGroup.guess_instance(user_group)
+        perm = db.Permission.guess_instance(perm)
 
         # if this permission is already granted skip it
-        _perm = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == user_group) \
-            .filter(UserGroupToPerm.permission == perm) \
+        _perm = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == user_group) \
+            .filter(db.UserGroupToPerm.permission == perm) \
             .scalar()
         if _perm:
             return
 
-        new = UserGroupToPerm()
+        new = db.UserGroupToPerm()
         new.users_group = user_group
         new.permission = perm
-        Session().add(new)
+        meta.Session().add(new)
         return new
 
     def revoke_perm(self, user_group, perm):
-        user_group = UserGroup.guess_instance(user_group)
-        perm = Permission.guess_instance(perm)
+        user_group = db.UserGroup.guess_instance(user_group)
+        perm = db.Permission.guess_instance(perm)
 
-        obj = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == user_group) \
-            .filter(UserGroupToPerm.permission == perm).scalar()
+        obj = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == user_group) \
+            .filter(db.UserGroupToPerm.permission == perm).scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
 
     def grant_user_permission(self, user_group, user, perm):
         """
@@ -259,19 +258,19 @@
         :param perm: Instance of Permission, or permission_name
         """
 
-        user_group = UserGroup.guess_instance(user_group)
-        user = User.guess_instance(user)
-        permission = Permission.guess_instance(perm)
+        user_group = db.UserGroup.guess_instance(user_group)
+        user = db.User.guess_instance(user)
+        permission = db.Permission.guess_instance(perm)
 
         # check if we have that permission already
-        obj = UserUserGroupToPerm.query() \
-            .filter(UserUserGroupToPerm.user == user) \
-            .filter(UserUserGroupToPerm.user_group == user_group) \
+        obj = db.UserUserGroupToPerm.query() \
+            .filter(db.UserUserGroupToPerm.user == user) \
+            .filter(db.UserUserGroupToPerm.user_group == user_group) \
             .scalar()
         if obj is None:
             # create new !
-            obj = UserUserGroupToPerm()
-            Session().add(obj)
+            obj = db.UserUserGroupToPerm()
+            meta.Session().add(obj)
         obj.user_group = user_group
         obj.user = user
         obj.permission = permission
@@ -287,15 +286,15 @@
         :param user: Instance of User, user_id or username
         """
 
-        user_group = UserGroup.guess_instance(user_group)
-        user = User.guess_instance(user)
+        user_group = db.UserGroup.guess_instance(user_group)
+        user = db.User.guess_instance(user)
 
-        obj = UserUserGroupToPerm.query() \
-            .filter(UserUserGroupToPerm.user == user) \
-            .filter(UserUserGroupToPerm.user_group == user_group) \
+        obj = db.UserUserGroupToPerm.query() \
+            .filter(db.UserUserGroupToPerm.user == user) \
+            .filter(db.UserUserGroupToPerm.user_group == user_group) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm on %s on %s', user_group, user)
 
     def grant_user_group_permission(self, target_user_group, user_group, perm):
@@ -306,23 +305,23 @@
         :param user_group:
         :param perm:
         """
-        target_user_group = UserGroup.guess_instance(target_user_group)
-        user_group = UserGroup.guess_instance(user_group)
-        permission = Permission.guess_instance(perm)
+        target_user_group = db.UserGroup.guess_instance(target_user_group)
+        user_group = db.UserGroup.guess_instance(user_group)
+        permission = db.Permission.guess_instance(perm)
         # forbid assigning same user group to itself
         if target_user_group == user_group:
             raise RepoGroupAssignmentError('target user group:%s cannot be '
                                            'assigned to itself' % target_user_group)
 
         # check if we have that permission already
-        obj = UserGroupUserGroupToPerm.query() \
-            .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \
-            .filter(UserGroupUserGroupToPerm.user_group == user_group) \
+        obj = db.UserGroupUserGroupToPerm.query() \
+            .filter(db.UserGroupUserGroupToPerm.target_user_group == target_user_group) \
+            .filter(db.UserGroupUserGroupToPerm.user_group == user_group) \
             .scalar()
         if obj is None:
             # create new !
-            obj = UserGroupUserGroupToPerm()
-            Session().add(obj)
+            obj = db.UserGroupUserGroupToPerm()
+            meta.Session().add(obj)
         obj.user_group = user_group
         obj.target_user_group = target_user_group
         obj.permission = permission
@@ -336,19 +335,19 @@
         :param target_user_group:
         :param user_group:
         """
-        target_user_group = UserGroup.guess_instance(target_user_group)
-        user_group = UserGroup.guess_instance(user_group)
+        target_user_group = db.UserGroup.guess_instance(target_user_group)
+        user_group = db.UserGroup.guess_instance(user_group)
 
-        obj = UserGroupUserGroupToPerm.query() \
-            .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \
-            .filter(UserGroupUserGroupToPerm.user_group == user_group) \
+        obj = db.UserGroupUserGroupToPerm.query() \
+            .filter(db.UserGroupUserGroupToPerm.target_user_group == target_user_group) \
+            .filter(db.UserGroupUserGroupToPerm.user_group == user_group) \
             .scalar()
         if obj is not None:
-            Session().delete(obj)
+            meta.Session().delete(obj)
             log.debug('Revoked perm on %s on %s', target_user_group, user_group)
 
     def enforce_groups(self, user, groups, extern_type=None):
-        user = User.guess_instance(user)
+        user = db.User.guess_instance(user)
         log.debug('Enforcing groups %s on user %s', user, groups)
         current_groups = user.group_member
         # find the external created groups
@@ -363,9 +362,9 @@
                 self.remove_user_from_group(gr, user)
 
         # now we calculate in which groups user should be == groups params
-        owner = User.get_first_admin().username
+        owner = db.User.get_first_admin().username
         for gr in set(groups):
-            existing_group = UserGroup.get_by_group_name(gr)
+            existing_group = db.UserGroup.get_by_group_name(gr)
             if not existing_group:
                 desc = 'Automatically created from plugin:%s' % extern_type
                 # we use first admin account to set the owner of the group
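
The grant_* methods above all follow the same get-or-create-then-update pattern against the Kallithea models. Below is a self-contained sketch of that pattern using a hypothetical stand-in model (assumes SQLAlchemy 1.4+; the table, columns and helper are illustrative, not Kallithea's).

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class GroupPerm(Base):
        __tablename__ = 'group_perm'
        id = Column(Integer, primary_key=True)
        group_id = Column(Integer, nullable=False)
        user_id = Column(Integer, nullable=False)
        permission = Column(String, nullable=False)

    def grant(session, group_id, user_id, permission):
        # Look for an existing row; create one only if missing, then (re)set the permission.
        obj = (session.query(GroupPerm)
               .filter(GroupPerm.group_id == group_id)
               .filter(GroupPerm.user_id == user_id)
               .scalar())
        if obj is None:
            obj = GroupPerm(group_id=group_id, user_id=user_id)
            session.add(obj)
        obj.permission = permission
        return obj

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with sessionmaker(engine)() as session:
        grant(session, group_id=1, user_id=2, permission='usergroup.read')
        session.commit()
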
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/model/userlog.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+kallithea.model.userlog
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Logging of user actions for Kallithea
+
+This file was forked by the Kallithea project in July 2014.
+Original author and date, and relevant copyright and licensing information is below:
+:created_on: Apr 08, 2010
+:author: marcink
+:copyright: (c) 2013 RhodeCode GmbH, and others.
+:license: GPLv3, see LICENSE.md for more details.
+"""
+
+
+import datetime
+import logging
+
+from kallithea.lib.utils2 import get_current_authuser
+from kallithea.model import db, meta
+
+
+log = logging.getLogger(__name__)
+
+
+def action_logger(user, action, repo, ipaddr='', commit=False):
+    """
+    Action logger for various actions made by users
+
+    :param user: user that made this action, can be a unique username string or
+        object containing user_id attribute
+    :param action: action to log, should be one of the predefined unique actions for
+        easy translations
+    :param repo: string name of the repository, or object containing repo_id,
+        that the action was made on
+    :param ipaddr: optional IP address from which the action was made
+
+    """
+
+    # if we don't get explicit IP address try to get one from registered user
+    # in tmpl context var
+    if not ipaddr:
+        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
+
+    if getattr(user, 'user_id', None):
+        user_obj = db.User.get(user.user_id)
+    elif isinstance(user, str):
+        user_obj = db.User.get_by_username(user)
+    else:
+        raise Exception('You have to provide a user object or a username')
+
+    if getattr(repo, 'repo_id', None):
+        repo_obj = db.Repository.get(repo.repo_id)
+        repo_name = repo_obj.repo_name
+    elif isinstance(repo, str):
+        repo_name = repo.lstrip('/')
+        repo_obj = db.Repository.get_by_repo_name(repo_name)
+    else:
+        repo_obj = None
+        repo_name = ''
+
+    user_log = db.UserLog()
+    user_log.user_id = user_obj.user_id
+    user_log.username = user_obj.username
+    user_log.action = action
+
+    user_log.repository = repo_obj
+    user_log.repository_name = repo_name
+
+    user_log.action_date = datetime.datetime.now()
+    user_log.user_ip = ipaddr
+    meta.Session().add(user_log)
+
+    log.info('Logging action:%s on %s by user:%s ip:%s',
+             action, repo, user_obj, ipaddr)
+    if commit:
+        meta.Session().commit()
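
For reference, a hedged usage sketch of the new userlog.action_logger: it assumes a configured Kallithea application with an active database session, and the username, repository name and IP address below are made up.

    from kallithea.model import userlog

    userlog.action_logger(
        'admin',                      # resolved via db.User.get_by_username()
        'started_following_repo',     # one of the predefined action strings
        'my-group/my-repo',           # a leading '/' would be stripped
        ipaddr='192.0.2.1',
        commit=True,                  # commit the session right away
    )
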
--- a/kallithea/model/validators.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/model/validators.py	Thu May 27 21:27:37 2021 +0200
@@ -27,14 +27,13 @@
 from sqlalchemy import func
 from tg.i18n import ugettext as _
 
-from kallithea.config.routing import ADMIN_PREFIX
-from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel
+import kallithea
+from kallithea.lib import auth
 from kallithea.lib.compat import OrderedSet
 from kallithea.lib.exceptions import InvalidCloneUriException, LdapImportError
 from kallithea.lib.utils import is_valid_repo_uri
-from kallithea.lib.utils2 import aslist, repo_name_slug, str2bool
+from kallithea.lib.utils2 import asbool, aslist, repo_name_slug
 from kallithea.model import db
-from kallithea.model.db import RepoGroup, Repository, User, UserGroup
 
 
 # silence warnings and pylint
@@ -86,10 +85,10 @@
             # check if user is unique
             old_un = None
             if edit:
-                old_un = User.get(old_data.get('user_id')).username
+                old_un = db.User.get(old_data.get('user_id')).username
 
             if old_un != value or not edit:
-                if User.get_by_username(value, case_insensitive=True):
+                if db.User.get_by_username(value, case_insensitive=True):
                     msg = self.message('username_exists', state, username=value)
                     raise formencode.Invalid(msg, value, state)
 
@@ -113,8 +112,8 @@
 
         def _validate_python(self, value, state):
             try:
-                User.query().filter(User.active == True) \
-                    .filter(User.username == value).one()
+                db.User.query().filter(db.User.active == True) \
+                    .filter(db.User.username == value).one()
             except sqlalchemy.exc.InvalidRequestError: # NoResultFound/MultipleResultsFound
                 msg = self.message('invalid_username', state, username=value)
                 raise formencode.Invalid(msg, value, state,
@@ -147,10 +146,10 @@
             old_ugname = None
             if edit:
                 old_id = old_data.get('users_group_id')
-                old_ugname = UserGroup.get(old_id).users_group_name
+                old_ugname = db.UserGroup.get(old_id).users_group_name
 
             if old_ugname != value or not edit:
-                is_existing_group = UserGroup.get_by_group_name(value,
+                is_existing_group = db.UserGroup.get_by_group_name(value,
                                                         case_insensitive=True)
                 if is_existing_group:
                     msg = self.message('group_exist', state, usergroup=value)
@@ -195,14 +194,14 @@
 
             old_gname = None
             if edit:
-                old_gname = RepoGroup.get(old_data.get('group_id')).group_name
+                old_gname = db.RepoGroup.get(old_data.get('group_id')).group_name
 
             if old_gname != group_name or not edit:
 
                 # check group
-                gr = RepoGroup.query() \
-                      .filter(func.lower(RepoGroup.group_name) == func.lower(slug)) \
-                      .filter(RepoGroup.parent_group_id == parent_group_id) \
+                gr = db.RepoGroup.query() \
+                      .filter(func.lower(db.RepoGroup.group_name) == func.lower(slug)) \
+                      .filter(db.RepoGroup.parent_group_id == parent_group_id) \
                       .scalar()
                 if gr is not None:
                     msg = self.message('group_exists', state, group_name=slug)
@@ -211,8 +210,8 @@
                     )
 
                 # check for same repo
-                repo = Repository.query() \
-                      .filter(func.lower(Repository.repo_name) == func.lower(slug)) \
+                repo = db.Repository.query() \
+                      .filter(func.lower(db.Repository.repo_name) == func.lower(slug)) \
                       .scalar()
                 if repo is not None:
                     msg = self.message('repo_exists', state, group_name=slug)
@@ -285,7 +284,7 @@
             # authenticate returns unused dict but has called
             # plugin._authenticate which has create_or_update'ed the username user in db
             if auth_modules.authenticate(username, password) is None:
-                user = User.get_by_username_or_email(username)
+                user = db.User.get_by_username_or_email(username)
                 if user and not user.active:
                     log.warning('user %s is disabled', username)
                     msg = self.message('invalid_auth', state)
@@ -320,13 +319,13 @@
             repo_name = repo_name_slug(value.get('repo_name', ''))
             repo_group = value.get('repo_group')
             if repo_group:
-                gr = RepoGroup.get(repo_group)
+                gr = db.RepoGroup.get(repo_group)
                 group_path = gr.full_path
                 group_name = gr.group_name
                 # value needs to be aware of the group name in order to check
                 # the db key; this is actually just the name to store in the
                 # database
-                repo_name_full = group_path + db.URL_SEP + repo_name
+                repo_name_full = group_path + kallithea.URL_SEP + repo_name
             else:
                 group_name = group_path = ''
                 repo_name_full = repo_name
@@ -343,7 +342,7 @@
             group_path = value.get('group_path')
             group_name = value.get('group_name')
 
-            if repo_name in [ADMIN_PREFIX, '']:
+            if repo_name in [kallithea.ADMIN_PREFIX, '']:
                 msg = self.message('invalid_repo_name', state, repo=repo_name)
                 raise formencode.Invalid(msg, value, state,
                     error_dict=dict(repo_name=msg)
@@ -352,8 +351,8 @@
             rename = old_data.get('repo_name') != repo_name_full
             create = not edit
             if rename or create:
-                repo = Repository.get_by_repo_name(repo_name_full, case_insensitive=True)
-                repo_group = RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True)
+                repo = db.Repository.get_by_repo_name(repo_name_full, case_insensitive=True)
+                repo_group = db.RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True)
                 if group_path != '':
                     if repo is not None:
                         msg = self.message('repository_in_group_exists', state,
@@ -400,7 +399,7 @@
         messages = {
             'clone_uri': _('Invalid repository URL'),
             'invalid_clone_uri': _('Invalid repository URL. It must be a '
-                                   'valid http, https, ssh, svn+http or svn+https URL'),
+                                   'valid http, https, or ssh URL'),
         }
 
         def _validate_python(self, value, state):
@@ -452,17 +451,16 @@
             return value
 
         def _validate_python(self, value, state):
-            gr = RepoGroup.get(value)
+            gr = db.RepoGroup.get(value)
             gr_name = gr.group_name if gr is not None else None # None means ROOT location
 
             # create repositories with write permission on group is set to true
-            create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
-            group_admin = HasRepoGroupPermissionLevel('admin')(gr_name,
+            group_admin = auth.HasRepoGroupPermissionLevel('admin')(gr_name,
                                             'can write into group validator')
-            group_write = HasRepoGroupPermissionLevel('write')(gr_name,
+            group_write = auth.HasRepoGroupPermissionLevel('write')(gr_name,
                                             'can write into group validator')
-            forbidden = not (group_admin or (group_write and create_on_write))
-            can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository')
+            forbidden = not (group_admin or group_write)
+            can_create_repos = auth.HasPermissionAny('hg.admin', 'hg.create.repository')
             gid = (old_data['repo_group'].get('group_id')
                    if (old_data and 'repo_group' in old_data) else None)
             value_changed = gid != value
@@ -502,7 +500,7 @@
             return value
 
         def _validate_python(self, value, state):
-            gr = RepoGroup.get(value)
+            gr = db.RepoGroup.get(value)
             gr_name = gr.group_name if gr is not None else None # None means ROOT location
 
             if can_create_in_root and gr is None:
@@ -510,7 +508,7 @@
                 return
 
             forbidden_in_root = gr is None and not can_create_in_root
-            forbidden = not HasRepoGroupPermissionLevel('admin')(gr_name, 'can create group validator')
+            forbidden = not auth.HasRepoGroupPermissionLevel('admin')(gr_name, 'can create group validator')
             if forbidden_in_root or forbidden:
                 msg = self.message('permission_denied', state)
                 raise formencode.Invalid(msg, value, state,
@@ -567,8 +565,8 @@
                     t = {'u': 'user',
                          'g': 'users_group'
                     }[k[0]]
-                    if member_name == User.DEFAULT_USER_NAME:
-                        if str2bool(value.get('repo_private')):
+                    if member_name == db.User.DEFAULT_USER_NAME:
+                        if asbool(value.get('repo_private')):
                             # set none for default when updating to
                             # private repo protects against form manipulation
                             v = EMPTY_PERM
@@ -581,13 +579,13 @@
             for k, v, t in perms_new:
                 try:
                     if t == 'user':
-                        _user_db = User.query() \
-                            .filter(User.active == True) \
-                            .filter(User.username == k).one()
+                        _user_db = db.User.query() \
+                            .filter(db.User.active == True) \
+                            .filter(db.User.username == k).one()
                     if t == 'users_group':
-                        _user_db = UserGroup.query() \
-                            .filter(UserGroup.users_group_active == True) \
-                            .filter(UserGroup.users_group_name == k).one()
+                        _user_db = db.UserGroup.query() \
+                            .filter(db.UserGroup.users_group_active == True) \
+                            .filter(db.UserGroup.users_group_name == k).one()
 
                 except Exception as e:
                     log.warning('Error validating %s permission %s', t, k)
@@ -649,7 +647,7 @@
 
         def _validate_python(self, value, state):
             if (old_data.get('email') or '').lower() != value:
-                user = User.get_by_email(value)
+                user = db.User.get_by_email(value)
                 if user is not None:
                     msg = self.message('email_taken', state)
                     raise formencode.Invalid(msg, value, state,
@@ -668,7 +666,7 @@
             return value.lower()
 
         def _validate_python(self, value, state):
-            user = User.get_by_email(value)
+            user = db.User.get_by_email(value)
             if user is None:
                 msg = self.message('non_existing_email', state, email=value)
                 raise formencode.Invalid(msg, value, state,
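
The validator hunks above consistently swap direct model-class imports for module-qualified access (db.User, db.RepoGroup, ...). A minimal sketch of the resulting pattern, using an illustrative helper name that is not part of the changeset:

    from kallithea.model import db

    def find_active_user(username):
        # module-qualified lookup, mirroring the refactored validators;
        # returns None when no active user matches
        return (db.User.query()
                .filter(db.User.active == True)
                .filter(db.User.username == username)
                .first())
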
--- a/kallithea/public/js/base.js	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/public/js/base.js	Thu May 27 21:27:37 2021 +0200
@@ -528,7 +528,7 @@
         var line_no = $anchorcomment.data('line_no');
         if ($comment_div[0]) {
             $comment_div.append($anchorcomment.children());
-            if (f_path && line_no) {
+            if (f_path && line_no !== '') {
                 _comment_div_append_add($comment_div, f_path, line_no);
             } else {
                 _comment_div_append_form($comment_div, f_path, line_no);
@@ -667,7 +667,7 @@
             }
         }
         function failure(x, s, e) {
-            $preview.removeClass('submitting').addClass('failed');
+            $preview.removeClass('submitting').children('.panel').addClass('panel-danger');
             var $status = $preview.find('.comment-submission-status');
             $('<span>', {
                 'title': e,
@@ -681,7 +681,7 @@
                     text: _TM['Retry']
                 }).click(function() {
                     $status.text(_TM['Submitting ...']);
-                    $preview.addClass('submitting').removeClass('failed');
+                    $preview.addClass('submitting').children('.panel').removeClass('panel-danger');
                     ajaxPOST(AJAX_COMMENT_URL, postData, success, failure);
                 }),
                 $('<button>', {
--- a/kallithea/templates/about.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/about.html	Thu May 27 21:27:37 2021 +0200
@@ -24,20 +24,27 @@
   necessarily limited to the following:</p>
   <ul>
 
-  <li>Copyright &copy; 2012&ndash;2020, Mads Kiilerich</li>
-  <li>Copyright &copy; 2014&ndash;2020, Thomas De Schampheleire</li>
-  <li>Copyright &copy; 2015&ndash;2017, 2019&ndash;2020, Étienne Gilli</li>
+  <li>Copyright &copy; 2012&ndash;2021, Mads Kiilerich</li>
+  <li>Copyright &copy; 2014&ndash;2021, Thomas De Schampheleire</li>
+  <li>Copyright &copy; 2015&ndash;2017, 2019&ndash;2021, Étienne Gilli</li>
+  <li>Copyright &copy; 2018&ndash;2021, ssantos</li>
+  <li>Copyright &copy; 2019&ndash;2021, Private</li>
+  <li>Copyright &copy; 2020&ndash;2021, fresh</li>
+  <li>Copyright &copy; 2020&ndash;2021, robertus</li>
+  <li>Copyright &copy; 2021, Eugenia Russell</li>
+  <li>Copyright &copy; 2021, Michalis</li>
+  <li>Copyright &copy; 2021, vs</li>
+  <li>Copyright &copy; 2021, Александр</li>
   <li>Copyright &copy; 2016&ndash;2017, 2020, Asterios Dimitriou</li>
   <li>Copyright &copy; 2017&ndash;2020, Allan Nordhøy</li>
   <li>Copyright &copy; 2017, 2020, Anton Schur</li>
-  <li>Copyright &copy; 2018&ndash;2020, ssantos</li>
   <li>Copyright &copy; 2019&ndash;2020, Manuel Jacob</li>
-  <li>Copyright &copy; 2019&ndash;2020, Private</li>
+  <li>Copyright &copy; 2020, Artem</li>
   <li>Copyright &copy; 2020, David Ignjić</li>
   <li>Copyright &copy; 2020, Dennis Fink</li>
   <li>Copyright &copy; 2020, J. Lavoie</li>
-  <li>Copyright &copy; 2020, robertus</li>
   <li>Copyright &copy; 2020, Ross Thomas</li>
+  <li>Copyright &copy; 2020, Tim Ooms</li>
   <li>Copyright &copy; 2012, 2014&ndash;2017, 2019, Andrej Shadura</li>
   <li>Copyright &copy; 2019, Adi Kriegisch</li>
   <li>Copyright &copy; 2019, Danni Randeris</li>
--- a/kallithea/templates/admin/admin.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/admin.html	Thu May 27 21:27:37 2021 +0200
@@ -27,7 +27,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+'use strict';
 $(document).ready(function() {
   $('#j_filter').click(function(){
     var $jfilter = $('#j_filter');
--- a/kallithea/templates/admin/admin_log.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/admin_log.html	Thu May 27 21:27:37 2021 +0200
@@ -37,7 +37,8 @@
     %endfor
 </table>
 
-<script>'use strict';
+<script>
+  'use strict';
   $(document).ready(function(){
     var $user_log = $('#user_log');
     $user_log.on('click','.pager_link',function(e){
--- a/kallithea/templates/admin/auth/auth_settings.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/auth/auth_settings.html	Thu May 27 21:27:37 2021 +0200
@@ -105,7 +105,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $('.toggle-plugin').click(function(e){
         var $auth_plugins_input = $('#auth_plugins');
         function notEmpty(element) {
--- a/kallithea/templates/admin/gists/edit.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/gists/edit.html	Thu May 27 21:27:37 2021 +0200
@@ -35,7 +35,8 @@
               ${(h.HTML(_('Gist was updated since you started editing. Copy your changes and click %(here)s to reload new version.'))
                              % {'here': h.link_to(_('here'),h.url('edit_gist', gist_id=c.gist.gist_access_id))})}
             </div>
-            <script>'use strict';
+            <script>
+            'use strict';
             if (typeof jQuery != 'undefined') {
                 $(".alert").alert();
             }
@@ -60,7 +61,7 @@
                      %else:
                       ${_('Expires')}: ${h.age(h.time_to_datetime(c.gist.gist_expires))}
                      %endif
-                   </span>
+                    </span>
                 </div>
             </div>
 
@@ -79,7 +80,8 @@
                 </div>
 
                 ## dynamic edit box.
-                <script>'use strict';
+                <script>
+                    'use strict';
                     $(document).ready(function(){
                         var myCodeMirror = initCodeMirror(${h.js('editor_' + h.FID('f',file.path))}, ${h.jshtml(request.script_name)}, '');
 
@@ -146,7 +148,8 @@
             <a class="btn btn-default" href="${h.url('gist', gist_id=c.gist.gist_access_id)}">${_('Cancel')}</a>
             </div>
           ${h.end_form()}
-          <script>'use strict';
+          <script>
+              'use strict';
               $('#update').on('click', function(e){
                   e.preventDefault();
 
--- a/kallithea/templates/admin/gists/index.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/gists/index.html	Thu May 27 21:27:37 2021 +0200
@@ -44,7 +44,7 @@
           <div class="gist-item clearfix">
             ${h.gravatar_div(gist.owner.email, size=28)}
             <span title="${gist.owner.full_contact}" class="user">
-                <b>${h.person(gist.owner.full_contact)}</b> /
+                <b>${gist.owner.username}</b> /
                 <b><a href="${h.url('gist',gist_id=gist.gist_access_id)}">gist: ${gist.gist_access_id}</a></b>
             </span>
             <div>
--- a/kallithea/templates/admin/gists/new.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/gists/new.html	Thu May 27 21:27:37 2021 +0200
@@ -55,7 +55,8 @@
             ${h.reset('reset',_('Reset'),class_="btn btn-default btn-xs")}
             </div>
           ${h.end_form()}
-          <script>'use strict';
+          <script>
+            'use strict';
             $(document).ready(function(){
                 var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, '');
 
--- a/kallithea/templates/admin/my_account/my_account_api_keys.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/my_account/my_account_api_keys.html	Thu May 27 21:27:37 2021 +0200
@@ -90,7 +90,8 @@
 ''')}</p>
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $("#lifetime").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/my_account/my_account_repos.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/my_account/my_account_repos.html	Thu May 27 21:27:37 2021 +0200
@@ -4,7 +4,8 @@
     <table class="table" id="datatable_list_wrap" width="100%"></table>
 </div>
 
-<script>'use strict';
+<script>
+  'use strict';
   var data = ${h.js(c.data)};
   $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/admin/my_account/my_account_watched.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/my_account/my_account_watched.html	Thu May 27 21:27:37 2021 +0200
@@ -4,7 +4,8 @@
     <table class="table" id="datatable_list_wrap" width="100%"></table>
 </div>
 
-<script>'use strict';
+<script>
+  'use strict';
   var data = ${h.js(c.data)};
   $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/admin/permissions/permissions_globals.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/permissions/permissions_globals.html	Thu May 27 21:27:37 2021 +0200
@@ -57,13 +57,6 @@
                 </div>
             </div>
             <div class="form-group">
-                <label class="control-label" for="create_on_write">${_('Repository creation with group write access')}:</label>
-                <div>
-                    ${h.select('create_on_write','',c.repo_create_on_write_choices,class_='form-control')}
-                    <span class="help-block">${_('With this, write permission to a repository group allows creating repositories inside that group. Without this, group write permissions mean nothing.')}</span>
-                </div>
-            </div>
-            <div class="form-group">
                 <label class="control-label" for="default_user_group_create">${_('User group creation')}:</label>
                 <div>
                     ${h.select('default_user_group_create','',c.user_group_create_choices,class_='form-control')}
--- a/kallithea/templates/admin/permissions/permissions_ips.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/permissions/permissions_ips.html	Thu May 27 21:27:37 2021 +0200
@@ -14,9 +14,9 @@
               </td>
           </tr>
         %endfor
-       %else:
+      %else:
         <tr><td><div class="ip">${_('All IP addresses are allowed.')}</div></td></tr>
-       %endif
+      %endif
 </table>
 
 ${h.form(url('edit_user_ips_update', id=c.user.user_id))}
--- a/kallithea/templates/admin/repo_groups/repo_group_add.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_add.html	Thu May 27 21:27:37 2021 +0200
@@ -61,7 +61,8 @@
     </div>
     ${h.end_form()}
 </div>
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         function setCopyPermsOption(group_val){
             if(group_val != "-1"){
--- a/kallithea/templates/admin/repo_groups/repo_group_edit_advanced.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_edit_advanced.html	Thu May 27 21:27:37 2021 +0200
@@ -7,7 +7,7 @@
     (_('Total repositories'), c.repo_group.repositories_recursive_count, ''),
     (_('Children groups'), c.repo_group.children.count(), ''),
     (_('Created on'), h.fmt_date(c.repo_group.created_on), ''),
-    (_('Owner'), h.person(c.repo_group.owner), ''),
+    (_('Owner'), c.repo_group.owner.username, ''),
  ]
 %>
 %for dt, dd, tt in elems:
--- a/kallithea/templates/admin/repo_groups/repo_group_edit_perms.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_edit_perms.html	Thu May 27 21:27:37 2021 +0200
@@ -102,10 +102,11 @@
 </div>
 ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
         let url = ${h.jshtml(h.url('edit_repo_group_perms_delete', group_name=c.repo_group.group_name))};
-        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
+        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1}?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             var recursive = $('input[name=recursive]:checked').val();
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id, {recursive:recursive});
--- a/kallithea/templates/admin/repo_groups/repo_group_edit_settings.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_group_edit_settings.html	Thu May 27 21:27:37 2021 +0200
@@ -41,7 +41,8 @@
 </div>
 ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $("#parent_group_id").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repo_groups/repo_groups.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repo_groups/repo_groups.html	Thu May 27 21:27:37 2021 +0200
@@ -30,7 +30,8 @@
         <table class="table" id="datatable_list_wrap" width="100%"></table>
     </div>
 </div>
-<script>'use strict';
+<script>
+  'use strict';
   var data = ${h.js(c.data)};
   $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/admin/repos/repo_add_base.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repo_add_base.html	Thu May 27 21:27:37 2021 +0200
@@ -7,7 +7,7 @@
             <div>
                 ${h.text('repo_name',class_='form-control')}
             </div>
-         </div>
+        </div>
         <div id="remote_clone" class="form-group">
             <label class="control-label" for="clone_uri">${_('Clone remote repository')}:</label>
             <div>
@@ -65,7 +65,8 @@
             </div>
         </div>
 </div>
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         function setCopyPermsOption(group_val){
             if(group_val != "-1"){
--- a/kallithea/templates/admin/repos/repo_creating.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repo_creating.html	Thu May 27 21:27:37 2021 +0200
@@ -42,10 +42,11 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+'use strict';
 (function worker() {
   $.ajax({
-    url: ${h.js(h.url('repo_check_home', repo_name=c.repo_name, repo=c.repo, task_id=c.task_id))},
+    url: ${h.js(h.url('repo_check_home', repo_name=c.repo_name, repo=c.repo))},
     success: function(data) {
       if(data.result === true){
           //redirect to created fork if our ajax loop tells us to do so.
--- a/kallithea/templates/admin/repos/repo_edit_advanced.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_advanced.html	Thu May 27 21:27:37 2021 +0200
@@ -9,7 +9,8 @@
 </div>
 ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $("#id_fork_of").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repos/repo_edit_permissions.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_permissions.html	Thu May 27 21:27:37 2021 +0200
@@ -87,10 +87,11 @@
 </div>
 ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
         let url = ${h.js(h.url('edit_repo_perms_revoke',repo_name=c.repo_name))};
-        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
+        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1}?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id);
         }
--- a/kallithea/templates/admin/repos/repo_edit_settings.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repo_edit_settings.html	Thu May 27 21:27:37 2021 +0200
@@ -49,7 +49,7 @@
                    ${h.text('owner',class_='form-control', placeholder=_('Type name of user'))}
                    <span class="help-block">${_('Change owner of this repository.')}</span>
                 </div>
-             </div>
+            </div>
             <div class="form-group">
                 <label class="control-label" for="repo_description">${_('Description')}:</label>
                 <div>
@@ -91,7 +91,7 @@
                           <span class="help-block">${field.field_desc}</span>
                         %endif
                     </div>
-                 </div>
+                </div>
               %endfor
             %endif
             <div class="form-group">
@@ -103,7 +103,8 @@
     </div>
     ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $('#repo_landing_rev').select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/repos/repos.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/repos/repos.html	Thu May 27 21:27:37 2021 +0200
@@ -29,7 +29,8 @@
     </div>
 
 </div>
-<script>'use strict';
+<script>
+  'use strict';
   var data = ${h.js(c.data)};
   $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/admin/settings/settings_global.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/settings/settings_global.html	Thu May 27 21:27:37 2021 +0200
@@ -1,12 +1,12 @@
 ${h.form(url('admin_settings_global'), method='post')}
     <div class="form">
-             <div class="form-group">
+            <div class="form-group">
                 <label class="control-label" for="title">${_('Site branding')}:</label>
                 <div>
                     ${h.text('title',size=30,class_='form-control')}
                     <span class="help-block">${_('Set a custom title for your Kallithea Service.')}</span>
                 </div>
-             </div>
+            </div>
 
             <div class="form-group">
                 <label class="control-label" for="realm">${_('HTTP authentication realm')}:</label>
@@ -49,6 +49,6 @@
                     ${h.submit('save',_('Save Settings'),class_="btn btn-default")}
                     ${h.reset('reset',_('Reset'),class_="btn btn-default")}
                 </div>
-           </div>
+            </div>
     </div>
 ${h.end_form()}
--- a/kallithea/templates/admin/settings/settings_hooks.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/settings/settings_hooks.html	Thu May 27 21:27:37 2021 +0200
@@ -1,20 +1,8 @@
-<div class="form">
-      <div class="form-group">
-      <h4>${_('Built-in Mercurial Hooks (Read-Only)')}</h4>
-      % for hook in c.hooks:
-        <% input_id = hook.ui_key.replace('.', '_') %>
-            <label class="control-label" for="${input_id}" title="${hook.ui_key}">${hook.ui_key}</label>
-            <div>
-              ${h.text(hook.ui_key,hook.ui_value,id=input_id,size=60,readonly="readonly",class_='form-control')}
-            </div>
-      % endfor
-      </div>
-</div>
-
 % if c.visual.allow_custom_hooks_settings:
 ${h.form(url('admin_settings_hooks'), method='post')}
 <div class="form">
-        <h4>${_('Custom Hooks')}</h4>
+      <div class="form-group">
+        <h4>${_('Custom Global Mercurial Hooks')}</h4>
         <span class="help-block">${_('Hooks can be used to trigger actions on certain events such as push / pull. They can trigger Python functions or external applications.')}</span>
         %for hook in c.custom_hooks:
             <div class="form-group form-inline" id="${'id%s' % hook.ui_id }">
@@ -46,11 +34,22 @@
                 ${h.submit('save',_('Save'),class_="btn btn-default")}
             </div>
         </div>
+      </div>
 </div>
 ${h.end_form()}
+
+<div class="form">
+      <div class="form-group">
+      <h4>${_('Git Hooks')}</h4>
+      <span class="help-block">${h.HTML(_('Kallithea has no support for custom Git hooks. Kallithea will use Git post-receive hooks internally. Installation of these hooks is managed in %s.')) % (h.literal('''<a href="%s">%s</a>''') % (h.url('admin_settings_mapping'), _('Remap and Rescan')))}</span>
+</div>
+
+% else:
+      <h4>${_('Custom Hooks are not enabled')}</h4>
 % endif
 
-<script>'use strict';
+<script>
+'use strict';
 function delete_hook(hook_id, field_id) {
     var sUrl = ${h.js(h.url('admin_settings_hooks_delete'))};
     function success() {
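
The help text above notes that custom hooks can trigger Python functions or external applications. A minimal sketch of such an in-process Mercurial hook, with illustrative names only; the hook key shown in the comment is an assumption about how a custom hook entry could look:

    # e.g. a custom hook entry such as: changegroup.logpush = python:myhooks.log_push
    def log_push(ui, repo, node=None, **kwargs):
        # report the first changeset added by the push; a false return value
        # means the hook does not veto the operation
        ui.status(b'changes pushed starting at %s\n' % node)
        return False
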
--- a/kallithea/templates/admin/settings/settings_mapping.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/settings/settings_mapping.html	Thu May 27 21:27:37 2021 +0200
@@ -25,14 +25,14 @@
                             ${_('Install Git hooks')}
                         </label>
                     </div>
-                    <span class="help-block">${_("Verify if Kallithea's Git hooks are installed for each repository. Current hooks will be updated to the latest version.")}</span>
+                    <span class="help-block">${_("Install Kallithea's internal hooks for all Git repositories where they are missing or can be upgraded. Existing hooks that don't seem to come from Kallithea will not be touched.")}</span>
                     <div class="checkbox">
                         <label>
                             ${h.checkbox('hooks_overwrite', True)}
-                            ${_('Overwrite existing Git hooks')}
+                            ${_('Install and overwrite Git hooks')}
                         </label>
                     </div>
-                    <span class="help-block">${_("If installing Git hooks, overwrite any existing hooks, even if they do not seem to come from Kallithea. WARNING: This operation will destroy any custom git hooks you may have deployed by hand!")}</span>
+                    <span class="help-block">${_("Install Kallithea's internal hooks for all Git repositories. Existing hooks that don't seem to come from Kallithea will be disabled by renaming to .bak extension.")}</span>
                 </div>
             </div>
 
--- a/kallithea/templates/admin/settings/settings_vcs.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/settings/settings_vcs.html	Thu May 27 21:27:37 2021 +0200
@@ -1,17 +1,17 @@
 ${h.form(url('admin_settings'), method='post')}
     <div class="form">
             <div class="form-group">
-                <label class="control-label">${_('Hooks')}:</label>
+                <label class="control-label">${_('Mercurial Push Hooks')}:</label>
                 <div>
                     <div class="checkbox">
                         <label>
-                            ${h.checkbox('hooks_changegroup_repo_size','True')}
+                            ${h.checkbox('hooks_changegroup_kallithea_repo_size','True')}
                             ${_('Show repository size after push')}
                         </label>
                     </div>
                     <div class="checkbox">
                         <label>
-                            ${h.checkbox('hooks_changegroup_update','True')}
+                            ${h.checkbox('hooks_changegroup_kallithea_update','True')}
                             ${_('Update repository after push (hg update)')}
                         </label>
                     </div>
@@ -26,13 +26,6 @@
                             ${_('Enable largefiles extension')}
                         </label>
                     </div>
-                    <div class="checkbox">
-                        <label>
-                            ${h.checkbox('extensions_hgsubversion','True')}
-                            ${_('Enable hgsubversion extension')}
-                        </label>
-                    </div>
-                    <span class="help-block">${_('Requires hgsubversion library to be installed. Enables cloning of remote Subversion repositories while converting them to Mercurial.')}</span>
                     ##<div class="checkbox">
                     ##    <label>
                     ##        ${h.checkbox('extensions_hggit','True')}
@@ -65,11 +58,12 @@
                     ${h.submit('save',_('Save Settings'),class_="btn btn-default")}
                     ${h.reset('reset',_('Reset'),class_="btn btn-default")}
                 </div>
-           </div>
+            </div>
     </div>
     ${h.end_form()}
 
-    <script>'use strict';
+    <script>
+        'use strict';
         $(document).ready(function(){
             $('#path_unlock').on('click', function(){
                 $('#path_unlock_icon').removeClass('icon-lock');
--- a/kallithea/templates/admin/settings/settings_visual.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/settings/settings_visual.html	Thu May 27 21:27:37 2021 +0200
@@ -95,7 +95,7 @@
                         </label>
                     </div>
                     <span class="help-block">${_('Show public/private icons next to repository names.')}</span>
-                 </div>
+                </div>
             </div>
 
             <div class="form-group">
@@ -120,7 +120,7 @@
                             <li>[see =&gt; URI] <span class="label label-meta" data-tag="see">see =&gt; <a href="#">URI</a> </span></li>
                         </ul>
                     </div>
-                 </div>
+                </div>
             </div>
 
             <div class="form-group">
--- a/kallithea/templates/admin/user_groups/user_group_add.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_add.html	Thu May 27 21:27:37 2021 +0200
@@ -52,7 +52,8 @@
     ${h.end_form()}
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $('#users_group_name').focus();
     });
--- a/kallithea/templates/admin/user_groups/user_group_edit_advanced.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_edit_advanced.html	Thu May 27 21:27:37 2021 +0200
@@ -5,7 +5,7 @@
  elems = [
     (_('Members'), len(c.group_members_obj), ''),
     (_('Created on'), h.fmt_date(c.user_group.created_on), ''),
-    (_('Owner'), h.person(c.user_group.owner), ''),
+    (_('Owner'), c.user_group.owner.username, ''),
     ]
 %>
 %for dt, dd, tt in elems:
--- a/kallithea/templates/admin/user_groups/user_group_edit_perms.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_edit_perms.html	Thu May 27 21:27:37 2021 +0200
@@ -92,10 +92,11 @@
 </div>
 ${h.end_form()}
 
-<script>'use strict';
+<script>
+    'use strict';
     function ajaxActionRevoke(obj_id, obj_type, field_id, obj_name) {
         let url = ${h.js(h.url('edit_user_group_perms_delete', id=c.user_group.users_group_id))};
-        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1} ?'].format(obj_type.replace('_', ' '), obj_name);
+        var revoke_msg = _TM['Confirm to revoke permission for {0}: {1}?'].format(obj_type.replace('_', ' '), obj_name);
         if (confirm(revoke_msg)){
             ajaxActionRevokePermission(url, obj_id, obj_type, field_id);
         }
--- a/kallithea/templates/admin/user_groups/user_group_edit_settings.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/user_groups/user_group_edit_settings.html	Thu May 27 21:27:37 2021 +0200
@@ -48,6 +48,7 @@
                 </div>
     </div>
 ${h.end_form()}
-<script>'use strict';
+<script>
+  'use strict';
   MultiSelectWidget('users_group_members','available_members','edit_users_group');
 </script>
--- a/kallithea/templates/admin/user_groups/user_groups.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/user_groups/user_groups.html	Thu May 27 21:27:37 2021 +0200
@@ -29,7 +29,8 @@
         <table class="table" id="datatable_list_wrap" width="100%"></table>
     </div>
 </div>
-<script>'use strict';
+<script>
+    'use strict';
     var data = ${h.js(c.data)};
     $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/admin/users/user_add.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/users/user_add.html	Thu May 27 21:27:37 2021 +0200
@@ -84,7 +84,8 @@
     ${h.end_form()}
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $('#username').focus();
     });
--- a/kallithea/templates/admin/users/user_edit_api_keys.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/users/user_edit_api_keys.html	Thu May 27 21:27:37 2021 +0200
@@ -77,7 +77,8 @@
     ${h.end_form()}
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $("#lifetime").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/admin/users/users.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/admin/users/users.html	Thu May 27 21:27:37 2021 +0200
@@ -28,7 +28,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     var data = ${h.js(c.data)};
     $("#datatable_list_wrap").DataTable({
         data: data.records,
--- a/kallithea/templates/base/base.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/base/base.html	Thu May 27 21:27:37 2021 +0200
@@ -23,7 +23,7 @@
             <a class="navbar-link" href="${h.url('kallithea_project_url')}" target="_blank">Kallithea</a>,
         %endif
         which is
-        <a class="navbar-link" href="${h.canonical_url('about')}#copyright">&copy; 2010&ndash;2020 by various authors &amp; licensed under GPLv3</a>.
+        <a class="navbar-link" href="${h.canonical_url('about')}#copyright">&copy; 2010&ndash;2021 by various authors &amp; licensed under GPLv3</a>.
         %if c.issues_url:
             &ndash; <a class="navbar-link" href="${c.issues_url}" target="_blank">${_('Support')}</a>
         %endif
@@ -81,11 +81,11 @@
   </ul>
 </%def>
 
-<%def name="repolabel(repo)">
-  %if h.is_hg(repo):
+<%def name="repolabel(repo_type)">
+  %if repo_type == 'hg':
     <span class="label label-repo" title="${_('Mercurial repository')}">hg</span>
   %endif
-  %if h.is_git(repo):
+  %if repo_type == 'git':
     <span class="label label-repo" title="${_('Git repository')}">git</span>
   %endif
 </%def>
@@ -97,7 +97,7 @@
     <div class="container-fluid">
     <div class="navbar-header">
       <div class="navbar-brand">
-        ${repolabel(c.db_repo)}
+        ${repolabel(c.db_repo.repo_type)}
 
         ## public/private
         %if c.db_repo.private:
@@ -171,13 +171,14 @@
                   <li><a href="${h.url('repo_fork_home',repo_name=c.repo_name)}"><i class="icon-fork"></i>${_('Fork')}</a></li>
                   <li><a href="${h.url('pullrequest_home',repo_name=c.repo_name)}"><i class="icon-git-pull-request"></i>${_('Create Pull Request')}</a></li>
               %endif
-             </ul>
+          </ul>
         </li>
     </ul>
     </div>
     </div>
   </nav>
-  <script>'use strict';
+  <script>
+    'use strict';
     $(document).ready(function() {
       var bcache = {};
 
@@ -399,7 +400,8 @@
     </li>
   </ul>
 
-    <script>'use strict';
+    <script>
+        'use strict';
         $(document).ready(function(){
             var visual_show_public_icon = ${h.js(c.visual.show_public_icon)};
             var cache = {}
@@ -527,7 +529,8 @@
         </div>
     </div>
 
-    <script>'use strict';
+    <script>
+      'use strict';
       $(document).ready(function(){
           activate_parent_child_links();
       });
--- a/kallithea/templates/base/flash_msg.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/base/flash_msg.html	Thu May 27 21:27:37 2021 +0200
@@ -9,7 +9,8 @@
             </div>
         % endfor
     % endif
-    <script>'use strict';
+    <script>
+    'use strict';
     if (typeof jQuery != 'undefined') {
         $(".alert").alert();
     }
--- a/kallithea/templates/base/perms_summary.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/base/perms_summary.html	Thu May 27 21:27:37 2021 +0200
@@ -97,7 +97,8 @@
         %endif
      %endfor
 </div>
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         function show_empty(section){
             var visible = $('.section_{0} tr.perm_row:visible'.format(section)).length;
--- a/kallithea/templates/base/root.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/base/root.html	Thu May 27 21:27:37 2021 +0200
@@ -21,7 +21,8 @@
         <%block name="css_extra"/>
 
         ## JAVASCRIPT ##
-        <script>'use strict';
+        <script>
+            'use strict';
             ## JS translations map
             var TRANSLATION_MAP = {
                 'Cancel': ${h.jshtml(_("Cancel"))},
@@ -45,7 +46,7 @@
                 'No revisions': ${h.jshtml(_('No revisions'))},
                 'Type name of user or member to grant permission': ${h.jshtml(_('Type name of user or member to grant permission'))},
                 'Failed to revoke permission': ${h.jshtml(_('Failed to revoke permission'))},
-                'Confirm to revoke permission for {0}: {1} ?': ${h.jshtml(_('Confirm to revoke permission for {0}: {1} ?'))},
+                'Confirm to revoke permission for {0}: {1}?': ${h.jshtml(_('Confirm to revoke permission for {0}: {1}?'))},
                 'Enabled': ${h.jshtml(_('Enabled'))},
                 'Disabled': ${h.jshtml(_('Disabled'))},
                 'Select changeset': ${h.jshtml(_('Select changeset'))},
@@ -77,7 +78,8 @@
         <script src="${h.url('/js/base.js', ver=c.kallithea_version)}"></script>
         ## EXTRA FOR JS
         <%block name="js_extra"/>
-        <script>'use strict';
+        <script>
+            'use strict';
             $(document).ready(function(){
               tooltip_activate();
               show_more_event();
--- a/kallithea/templates/changelog/changelog.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changelog/changelog.html	Thu May 27 21:27:37 2021 +0200
@@ -18,7 +18,7 @@
      - /${c.changelog_for_path}
     %endif
     %if c.revision:
-    @ ${h.short_id(c.first_revision.raw_id)}
+    @ ${c.first_revision.short_id}
     %endif
     - ${ungettext('showing %d out of %d revision', 'showing %d out of %d revisions', size) % (size, c.total_cs)}
 </%def>
@@ -81,7 +81,8 @@
                 ${c.cs_pagination.pager()}
 
         <script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
-        <script>'use strict';
+        <script>
+            'use strict';
             var jsdata = ${h.js(c.jsdata)};
             var graph = new BranchRenderer('graph_canvas', 'graph_content', 'chg_');
 
--- a/kallithea/templates/changelog/changelog_table.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changelog/changelog_table.html	Thu May 27 21:27:37 2021 +0200
@@ -110,7 +110,8 @@
     </tbody>
     </table>
 
-<script>'use strict';
+<script>
+  'use strict';
   $(document).ready(function() {
     $('#changesets .expand_commit').on('click',function(){
       $(this).next('.mid').find('.message > div').toggleClass('hidden');
--- a/kallithea/templates/changeset/changeset.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changeset/changeset.html	Thu May 27 21:27:37 2021 +0200
@@ -22,7 +22,8 @@
   <div class="panel-heading clearfix">
     ${self.breadcrumbs()}
   </div>
-  <script>'use strict';
+  <script>
+    'use strict';
     var AJAX_COMMENT_URL = ${h.js(url('changeset_comment',repo_name=c.repo_name,revision=c.changeset.raw_id))};
     var AJAX_COMMENT_DELETE_URL = ${h.js(url('changeset_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
   </script>
@@ -47,8 +48,8 @@
                   <a href="${h.url('changeset_download_home',repo_name=c.repo_name,revision=c.changeset.raw_id,diff='download')}"
                      data-toggle="tooltip"
                      title="${_('Download diff')}"><i class="icon-floppy"></i></a>
-                  ${c.ignorews_url(request.GET)}
-                  ${c.context_url(request.GET)}
+                  ${h.ignore_whitespace_link(request.GET)}
+                  ${h.increase_context_link(request.GET)}
                 </div>
         </div>
         <div class="panel-body">
@@ -138,8 +139,8 @@
               % else:
                   ${ungettext('%s file changed with %s insertions and %s deletions', '%s files changed with %s insertions and %s deletions', len(file_diff_data)) % (len(file_diff_data), c.lines_added, c.lines_deleted)}:
               %endif
-              </div>
-              <div class="cs_files">
+            </div>
+            <div class="cs_files">
                 %for fid, url_fid, op, a_path, path, diff, stats in file_diff_data:
                     <div class="cs_${op} clearfix">
                       <span class="node">
@@ -181,10 +182,11 @@
     ## main comment form and it status
     ${comment.comments()}
 
-    </div>
+  </div>
 
     ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS
-    <script>'use strict';
+    <script>
+      'use strict';
       $(document).ready(function(){
           $('.code-difftable').on('click', '.add-bubble', function(){
               show_comment_form($(this));
@@ -200,5 +202,5 @@
 
     </script>
 
-  </div>
+</div>
 </%def>
--- a/kallithea/templates/changeset/changeset_file_comment.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changeset/changeset_file_comment.html	Thu May 27 21:27:37 2021 +0200
@@ -192,7 +192,8 @@
   </div>
 </div>
 
-<script>'use strict';
+<script>
+'use strict';
 
 $(document).ready(function () {
 
--- a/kallithea/templates/changeset/changeset_range.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changeset/changeset_range.html	Thu May 27 21:27:37 2021 +0200
@@ -35,7 +35,7 @@
                     %if c.visual.use_gravatar:
                     <td>${h.gravatar_div(h.email_or_none(cs.author), size=14)}</td>
                     %endif
-                    <td>${h.link_to(h.short_id(cs.raw_id),h.url('changeset_home',repo_name=c.cs_repo.repo_name,revision=cs.raw_id))}</td>
+                    <td>${h.link_to(cs.short_id,h.url('changeset_home',repo_name=c.cs_repo.repo_name,revision=cs.raw_id))}</td>
                     <td class="author">${h.person(cs.author)}</td>
                     <td><span data-toggle="tooltip" title="${h.age(cs.date)}">${cs.date}</span></td>
                     <td>
@@ -79,7 +79,7 @@
                 %if len(cs.parents)>1:
                 <span class="label label-merge">${_('Merge')}</span>
                 %endif
-                %if h.is_hg(c.db_repo_scm_instance):
+                %if c.db_repo_scm_instance.alias == 'hg':
                   %for book in cs.bookmarks:
                   <span class="label label-bookmark" title="${_('Bookmark %s') % book}">
                      ${h.link_to(book,h.url('changeset_home',repo_name=c.cs_repo.repo_name,revision=cs.raw_id))}
--- a/kallithea/templates/changeset/diff_block.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changeset/diff_block.html	Thu May 27 21:27:37 2021 +0200
@@ -61,16 +61,16 @@
                       <i class="icon-file-code"></i></a>
                   <a href="${h.url('files_diff_2way_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='diff',fulldiff=1)}" data-toggle="tooltip" title="${_('Show full side-by-side diff for this file')}">
                       <i class="icon-docs"></i></a>
-                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='raw')}" data-toggle="tooltip" title="${_('Raw diff')}">
+                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='raw')}" data-toggle="tooltip" title="${_('Raw diff for this file')}">
                       <i class="icon-diff"></i></a>
-                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='download')}" data-toggle="tooltip" title="${_('Download diff')}">
+                  <a href="${h.url('files_diff_home',repo_name=cs_repo_name,f_path=cs_filename,diff2=cs_rev,diff1=a_rev,diff='download')}" data-toggle="tooltip" title="${_('Download diff for this file')}">
                       <i class="icon-floppy"></i></a>
-                  ${c.ignorews_url(request.GET, url_fid)}
-                  ${c.context_url(request.GET, url_fid)}
+                  ${h.ignore_whitespace_link(request.GET, id_fid)}
+                  ${h.increase_context_link(request.GET, id_fid)}
                 </div>
                 <div class="pull-right">
                     ${_('Show inline comments')}
-                    ${h.checkbox('checkbox-show-inline-' + id_fid, checked="checked",class_="show-inline-comments",**{'data-id_for':id_fid})}
+                    ${h.checkbox('checkbox-show-inline-' + id_fid, checked="checked",class_="show-inline-comments",**{'data-for':id_fid})}
                 </div>
         </div>
         <div class="no-padding panel-body" data-f_path="${cs_filename}">
@@ -96,7 +96,8 @@
 </%def>
 
 <%def name="diff_block_js()">
-<script>'use strict';
+<script>
+'use strict';
 $(document).ready(function(){
     $('.btn-image-diff-show').click(function(){
         $('.btn-image-diff-show').hide();
@@ -137,7 +138,7 @@
         if(target == null){
             target = this;
         }
-        var boxid = $(target).data('id_for');
+        var boxid = $(target).data('for');
         if(target.checked){
             $('#{0} .inline-comments'.format(boxid)).show();
         }else{
--- a/kallithea/templates/changeset/patch_changeset.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/changeset/patch_changeset.html	Thu May 27 21:27:37 2021 +0200
@@ -1,4 +1,4 @@
-%if h.is_hg(c.db_repo_scm_instance):
+%if c.db_repo_scm_instance.alias == 'hg':
 # ${c.db_repo_scm_instance.alias.upper()} changeset patch
 # User ${c.changeset.author |n}
 # Date ${c.changeset.date}
@@ -6,7 +6,7 @@
 ${c.parent_tmpl}
 ${c.changeset.message |n}
 
-%elif h.is_git(c.db_repo_scm_instance):
+%elif c.db_repo_scm_instance.alias == 'git':
 From ${c.changeset.raw_id} ${c.changeset.date}
 From: ${c.changeset.author |n}
 Date: ${c.changeset.date}
--- a/kallithea/templates/compare/compare_cs.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/compare/compare_cs.html	Thu May 27 21:27:37 2021 +0200
@@ -66,7 +66,8 @@
 <script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
 %endif
 
-<script>'use strict';
+<script>
+    'use strict';
     var jsdata = ${h.js(c.jsdata)};
     var graph = new BranchRenderer('graph_canvas', 'graph_content_pr', 'chg_');
 
--- a/kallithea/templates/compare/compare_diff.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/compare/compare_diff.html	Thu May 27 21:27:37 2021 +0200
@@ -30,11 +30,14 @@
                 ## divs are "inline-block" and cannot have whitespace between them.
                 <span>
                     ${h.hidden('compare_org')}
-                </span><span>
+                </span><!--
+             --><span>
                     <i class="icon-right"></i>
-                </span><span>
+                </span><!--
+             --><span>
                     ${h.hidden('compare_other')}
-                </span><span>
+                </span><!--
+             --><span>
                     %if not c.compare_home:
                         <a class="btn btn-default btn-sm" href="${c.swap_url}"><i class="icon-arrows-cw"></i>${_('Swap')}</a>
                     %endif
@@ -60,9 +63,8 @@
                 % else:
                     ${ungettext('%s file changed with %s insertions and %s deletions','%s files changed with %s insertions and %s deletions', len(c.file_diff_data)) % (len(c.file_diff_data),c.lines_added,c.lines_deleted)}:
                 %endif
-
-                ${c.ignorews_url(request.GET)}
-                ${c.context_url(request.GET)}
+                ${h.ignore_whitespace_link(request.GET)}
+                ${h.increase_context_link(request.GET)}
                 </h5>
                 <div class="cs_files">
                   %if not c.file_diff_data:
@@ -98,7 +100,8 @@
     </div>
 
 </div>
-    <script>'use strict';
+    <script>
+'use strict';
 
    $(document).ready(function(){
     var cache = {};
--- a/kallithea/templates/data_table/_dt_elements.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/data_table/_dt_elements.html	Thu May 27 21:27:37 2021 +0200
@@ -14,7 +14,7 @@
     %>
   <div class="dt_repo ${'dt_repo_pending' if rstate == 'repo_state_pending' else ''}">
     ${base.repolabel(rtype)}
-    <a href="${h.url('summary_home', repo_name=name)}">
+    <a href="${webutils.url('summary_home', repo_name=name)}">
         ${get_name(name)}
     </a>
     %if private and c.visual.show_private_icon:
@@ -23,7 +23,7 @@
       <i class="icon-globe" title="${_('Public repository')}"></i>
     %endif
     %if fork_of:
-      <a href="${h.url('summary_home',repo_name=fork_of.repo_name)}"><i class="icon-fork"></i></a>
+      <a href="${webutils.url('summary_home',repo_name=fork_of.repo_name)}"><i class="icon-fork"></i></a>
     %endif
     %if rstate == 'repo_state_pending':
       <i class="icon-wrench" title="${_('Repository creation in progress...')}"></i>
@@ -38,12 +38,12 @@
 </%def>
 
 <%def name="last_change(last_change)">
-  <span data-toggle="tooltip" title="${h.fmt_date(last_change)}" date="${last_change}">${h.age(last_change)}</span>
+  <span data-toggle="tooltip" title="${webutils.fmt_date(last_change)}" date="${last_change}">${webutils.age(last_change)}</span>
 </%def>
 
 <%def name="revision(name,rev,tip,author,last_msg)">
   %if rev >= 0:
-      <a data-toggle="popover" title="${author | entity}" data-content="${last_msg | entity}" class="changeset_hash" href="${h.url('changeset_home',repo_name=name,revision=tip)}">${'r%s:%s' % (rev,h.short_id(tip))}</a>
+      <a data-toggle="popover" title="${author | entity}" data-content="${last_msg | entity}" class="changeset_hash" href="${webutils.url('changeset_home',repo_name=name,revision=tip)}">${'r%s:%s' % (rev,webutils.short_id(tip))}</a>
   %else:
       ${_('No changesets yet')}
   %endif
@@ -51,30 +51,30 @@
 
 <%def name="rss(name)">
   %if request.authuser.username != 'default':
-    <a title="${_('Subscribe to %s rss feed')% name}" href="${h.url('rss_feed_home',repo_name=name,api_key=request.authuser.api_key)}"><i class="icon-rss-squared"></i></a>
+    <a title="${_('Subscribe to %s rss feed')% name}" href="${webutils.url('rss_feed_home',repo_name=name,api_key=request.authuser.api_key)}"><i class="icon-rss-squared"></i></a>
   %else:
-    <a title="${_('Subscribe to %s rss feed')% name}" href="${h.url('rss_feed_home',repo_name=name)}"><i class="icon-rss-squared"></i></a>
+    <a title="${_('Subscribe to %s rss feed')% name}" href="${webutils.url('rss_feed_home',repo_name=name)}"><i class="icon-rss-squared"></i></a>
   %endif
 </%def>
 
 <%def name="atom(name)">
   %if request.authuser.username != 'default':
-    <a title="${_('Subscribe to %s atom feed')% name}" href="${h.url('atom_feed_home',repo_name=name,api_key=request.authuser.api_key)}"><i class="icon-rss-squared"></i></a>
+    <a title="${_('Subscribe to %s atom feed')% name}" href="${webutils.url('atom_feed_home',repo_name=name,api_key=request.authuser.api_key)}"><i class="icon-rss-squared"></i></a>
   %else:
-    <a title="${_('Subscribe to %s atom feed')% name}" href="${h.url('atom_feed_home',repo_name=name)}"><i class="icon-rss-squared"></i></a>
+    <a title="${_('Subscribe to %s atom feed')% name}" href="${webutils.url('atom_feed_home',repo_name=name)}"><i class="icon-rss-squared"></i></a>
   %endif
 </%def>
 
 <%def name="repo_actions(repo_name)">
-      <a href="${h.url('edit_repo',repo_name=repo_name)}" title="${_('Edit')}" class="btn btn-default btn-xs">
+      <a href="${webutils.url('edit_repo',repo_name=repo_name)}" title="${_('Edit')}" class="btn btn-default btn-xs">
         <i class="icon-pencil"></i>${_('Edit')}
       </a>
-      ${h.form(h.url('delete_repo', repo_name=repo_name))}
+      ${webutils.form(webutils.url('delete_repo', repo_name=repo_name))}
         <button name="${'remove_%s' % repo_name}" class="btn btn-default btn-xs"
             onclick="return confirm('${_('Confirm to delete this repository: %s') % repo_name}');">
           <i class="icon-trashcan"></i>${_('Delete')}
         </button>
-      ${h.end_form()}
+      ${webutils.end_form()}
 </%def>
 
 <%def name="repo_state(repo_state)">
@@ -88,62 +88,62 @@
 </%def>
 
 <%def name="user_actions(user_id, username)">
-   <a href="${h.url('edit_user',id=user_id)}" title="${_('Edit')}" class="btn btn-default btn-xs">
+   <a href="${webutils.url('edit_user',id=user_id)}" title="${_('Edit')}" class="btn btn-default btn-xs">
      <i class="icon-pencil"></i>${_('Edit')}
    </a>
-  ${h.form(h.url('delete_user', id=user_id))}
+  ${webutils.form(webutils.url('delete_user', id=user_id))}
     <button id="${'remove_user_%s' % user_id}" name="${'remove_user_%s' % repo_name}" class="btn btn-default btn-xs" title="${_('Delete')}"
         onclick="return confirm('${_('Confirm to delete this user: %s') % username}');">
       <i class="icon-trashcan"></i>${_('Delete')}
     </button>
-  ${h.end_form()}
+  ${webutils.end_form()}
 </%def>
 
 <%def name="user_group_actions(user_group_id, user_group_name)">
-    <a href="${h.url('edit_users_group', id=user_group_id)}" title="${_('Edit')}" class="btn btn-default btn-xs">
+    <a href="${webutils.url('edit_users_group', id=user_group_id)}" title="${_('Edit')}" class="btn btn-default btn-xs">
       <i class="icon-pencil"></i>${_('Edit')}
     </a>
-    ${h.form(h.url('delete_users_group', id=user_group_id))}
+    ${webutils.form(webutils.url('delete_users_group', id=user_group_id))}
       <button id="${'remove_group_%s' % user_group_id}" name="${'remove_user_%s' % repo_name}" class="btn btn-default btn-xs" title="${_('Delete')}"
           onclick="return confirm('${_('Confirm to delete this user group: %s') % user_group_name}');">
         <i class="icon-trashcan"></i>${_('Delete')}
       </button>
-    ${h.end_form()}
+    ${webutils.end_form()}
 </%def>
 
 <%def name="group_name_html(group_name,name)">
   <div class="dt_repo">
     <i class="icon-folder"></i>
-    <a href="${h.url('repos_group_home',group_name=group_name)}">${name}</a>
+    <a href="${webutils.url('repos_group_home',group_name=group_name)}">${name}</a>
   </div>
 </%def>
 
 <%def name="repo_group_actions(repo_group_id, repo_group_name, gr_count)">
-    <a href="${h.url('edit_repo_group',group_name=repo_group_name)}" title="${_('Edit')}" class="btn btn-default btn-xs">
+    <a href="${webutils.url('edit_repo_group',group_name=repo_group_name)}" title="${_('Edit')}" class="btn btn-default btn-xs">
       <i class="icon-pencil"></i>${_('Edit')}
     </a>
-    ${h.form(h.url('delete_repo_group', group_name=repo_group_name))}
+    ${webutils.form(webutils.url('delete_repo_group', group_name=repo_group_name))}
       <button id="${'remove_%s' % repo_group_name}" name="${'remove_%s' % repo_group_name}" class="btn btn-default btn-xs" title="${_('Delete')}"
           onclick="return confirm('${ungettext('Confirm to delete this group: %s with %s repository','Confirm to delete this group: %s with %s repositories',gr_count) % (repo_group_name, gr_count)}')">
         <i class="icon-trashcan"></i>${_('Delete')}
       </button>
-    ${h.end_form()}
+    ${webutils.end_form()}
 </%def>
 
 <%def name="user_name(user_id, username)">
-    ${h.link_to(username,h.url('edit_user', id=user_id))}
+    ${webutils.link_to(username,webutils.url('edit_user', id=user_id))}
 </%def>
 
 <%def name="repo_group_name(repo_group_name, children_groups)">
   <div class="text-nowrap">
-  <a href="${h.url('repos_group_home',group_name=repo_group_name)}">
-    <i class="icon-folder" title="${_('Repository group')}"></i>${h.literal(' &raquo; ').join(children_groups)}</a>
+  <a href="${webutils.url('repos_group_home',group_name=repo_group_name)}">
+    <i class="icon-folder" title="${_('Repository group')}"></i>${webutils.literal(' &raquo; ').join(children_groups)}</a>
   </div>
 </%def>
 
 <%def name="user_group_name(user_group_id, user_group_name)">
   <div class="text-nowrap">
-  <a href="${h.url('edit_users_group', id=user_group_id)}">
+  <a href="${webutils.url('edit_users_group', id=user_group_id)}">
     <i class="icon-users" title="${_('User group')}"></i>${user_group_name}</a>
   </div>
 </%def>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/button.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,28 @@
+<%page args="url,title='',padding_top=True,padding_bottom=True" />\
+##<!-- button -->
+<center>
+    <table cellspacing="0" cellpadding="0" style="margin-left:auto;margin-right:auto">
+        %if padding_top:
+        <tr>
+            <td height="25px" style="height:25px"></td>
+        </tr>
+        %endif
+        <tr>
+            <td style="border-collapse:collapse;border-radius:2px;text-align:center;display:block;border:solid 1px ${color_button};padding:11px 20px 11px 20px">
+                <a href="${url}" style="text-decoration:none;display:block" target="_blank">
+                    <center>
+                        <font size="3">
+                            <span style="${sans_style};font-weight:700;font-size:15px;line-height:14px;color:${color_button};white-space:nowrap;vertical-align:middle">${_(title)}</span>
+                        </font>
+                    </center>
+                </a>
+            </td>
+        </tr>
+        %if padding_bottom:
+        <tr>
+            <td height="25px" style="height:25px"></td>
+        </tr>
+        %endif
+    </table>
+</center>
+##<!-- /button -->
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/button.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,3 @@
+<%page args="url,title" />\
+
+${title|n}: ${url}
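As a quick orientation for reviewers: the files added under kallithea/templates/email/ are plain Mako page templates that receive their arguments via <%include ... args="..."> from the surrounding mail templates. The simplest of them, button.txt above, can even be rendered standalone; the sketch below is illustrative only (not part of the changeset) and assumes a checkout with Mako installed. The HTML variants additionally need the style/colour variables and the gettext _ that Kallithea's mailer puts into the template context, so they are not shown here.

# Illustrative sketch only -- not part of this changeset.
# Renders the new email/button.txt page template standalone; the directory path
# is an assumption about where the checkout lives relative to the working dir.
from mako.lookup import TemplateLookup

lookup = TemplateLookup(directories=['kallithea/templates/email'])
button_txt = lookup.get_template('button.txt')

# button.txt declares <%page args="url,title" />, so both arguments are required.
print(button_txt.render(url='https://kallithea.example.com/repo/changeset/abc123',
                        title='View Comment'))
# Prints (apart from surrounding whitespace):
#   View Comment: https://kallithea.example.com/repo/changeset/abc123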
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/changeset_comment.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,40 @@
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('Mention in Comment on Changeset "%s"') % webutils.shorter(message, 200, firstline=True) if is_mention else _('Comment on Changeset "%s"') % webutils.shorter(message, 200, firstline=True) %>\
+<%include file="header.html" args="title=title,link=cs_comment_url"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+<%include file="comment.html" args="text=body,author=cs_comment_user,status=status_change"/>\
+        </td>
+    </tr>
+    <tr>
+        <td height="30px" style="height:30px"></td>
+    </tr>
+    <tr>
+        <td>
+            <div>
+                ${_('Changeset on')}
+                <a style="${link_text_style}"
+                   href="${cs_target_repo}">${cs_target_repo}</a>
+                ${_('branch')}
+                <span style="${data_style}">${branch}</span>:
+            </div>
+            <div>
+                "<a style="${link_style}"
+                   href="${cs_url}">${webutils.shorter(message, 60, firstline=True)}</a>"
+                ${_('by')}
+                <span style="${data_style}">${cs_author.full_name_and_username}</span>.
+            </div>
+        </td>
+    </tr>
+    <tr>
+        <td>
+<% title = _('View Comment') %>\
+<%include file="button.html" args="url=cs_comment_url,title=title,padding_bottom=False"/>\
+        </td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/changeset_comment.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,17 @@
+<%block name="header">\
+<% title = _('Mention in Comment on Changeset "%s"') % webutils.shorter(message, 200, firstline=True) if is_mention else _('Comment on Changeset "%s"') % webutils.shorter(message, 200, firstline=True) %>\
+<%include file="header.txt" args="title=title,link=cs_comment_url"/>\
+</%block>\
+
+<%include file="comment.txt" args="text=body,author=cs_comment_user,status=status_change"/>\
+
+${_('Changeset on')|n} \
+${cs_target_repo|n} \
+${_('branch')|n} \
+${branch|n}:
+"${webutils.shorter(message, 60, firstline=True)|n}" \
+${_('by')|n} \
+${cs_author.full_name_and_username|n}.
+
+<% title = _('View Comment') %>\
+<%include file="button.txt" args="url=cs_comment_url,title=title"/>\
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/comment.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,57 @@
+<%page args="author,text,status,close=False" />\
+\
+##<!-- comment -->
+<table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="${color_background_grey}" style="border:1px solid ${color_border};border-radius:4px">
+    <tr>
+        <td height="10px" style="height:10px" colspan="3"></td>
+    </tr>
+    <tr>
+        <td width="20px" style="width:20px"></td>
+        <td>
+            <div style="${emph_style}">${author}</div>
+        </td>
+        <td width="20px" style="width:20px"></td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px;border-bottom:1px solid ${color_border}" colspan="3"></td>
+    </tr>
+    %if status or close:
+        <tr>
+            <td height="10px" style="height:10px" colspan="3"></td>
+        </tr>
+        <tr>
+            <td width="20px" style="width:20px"></td>
+            <td>
+                %if status:
+                    <div style="font-weight:600">
+                        ${_('Status change:')}
+                        ${status}
+                    </div>
+                %endif
+                %if close:
+                    <div style="font-weight:600">
+                        ${_('The pull request has been closed.')}
+                    </div>
+                %endif
+            </td>
+            <td width="20px" style="width:20px"></td>
+        </tr>
+        <tr>
+            <td height="10px" style="height:10px;border-bottom:1px solid ${color_border}" colspan="3"></td>
+        </tr>
+    %endif
+    <tr>
+        <td height="10px" style="height:10px" colspan="3"></td>
+    </tr>
+    <tr>
+        <td width="20px" style="width:20px"></td>
+        <td>
+            <div style="${comment_style}">${text}</div>
+        </td>
+        <td width="20px" style="width:20px"></td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px" colspan="3"></td>
+    </tr>
+</table>
+##<!-- /comment -->
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/comment.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,15 @@
+<%page args="author,text,status,close=False" />\
+${author|n}:
+
+%if status:
+${_('Status change:')|n} \
+${status|n}
+
+%endif
+%if close:
+${_('The pull request has been closed.')|n}
+
+%endif
+${text|n}
+
+## Trailing empty line
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/default.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,12 @@
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('Message') %>\
+<%include file="header.html" args="title=title,link=None"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td style="${comment_style}">${body}</td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/default.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,4 @@
+<%block name="header">\
+</%block>\
+\
+${body|n}\
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/header.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,25 @@
+<%page args="title,link" />\
+\
+##<!-- header -->
+<table bgcolor="${color_background_grey}" width="100%" cellpadding="0" cellspacing="0"
+       style="border-bottom:1px solid ${color_border}">
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td style="${sans_style};font-size:19px;line-height:24px">
+        %if link is not None:
+            <a style="text-decoration:none;${emph_style}" href="${link}"
+               target="_blank">${title}</a>
+        %else:
+            <span style="${emph_style}">${title}</span>
+        %endif
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+</table>
+##<!-- /header -->
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/header.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,8 @@
+<%page args="title,link" />\
+%if link is not None:
+${link}
+
+%endif
+${title|n}
+
+## Trailing empty line
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/main.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,42 @@
+<!doctype html>
+<html lang="en">
+<head>
+    <title></title>
+    <meta name="viewport" content="width=device-width">
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+</head>
+<body>
+<table align="center" cellpadding="0" cellspacing="0" border="0" style="min-width:348px;max-width:800px;${default_style}">
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td>
+            <table width="100%" cellpadding="0" cellspacing="0" border="0"
+                   style="table-layout:fixed;${sans_style};border:1px solid ${color_border}">
+                <tr><td width="30px" style="width:30px"></td><td></td><td width="30px" style="width:30px"></td></tr>
+                <tr>
+                    <td colspan="3">
+<%block name="header"/>\
+                    </td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td></td>
+                    <td>
+                        ##<!-- body -->
+${self.body()}\
+                        ##<!-- /body -->
+                    </td>
+                    <td></td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+</table>
+</body>
+</html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/password_reset.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,48 @@
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('Password Reset Request') %>\
+<%include file="header.html" args="title=title,link=None"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%" style="table-layout:fixed;word-wrap:break-word;">
+    <tr>
+        <td>${_('Hello %s') % user},</td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px"></td>
+    </tr>
+    <tr>
+        <td>
+            ${_('We have received a request to reset the password for your account.')}
+        </td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px"></td>
+    </tr>
+    <tr>
+        <td>
+            %if reset_token is None:
+                <div>${_('This account is however managed outside this system and the password cannot be changed here.')}</div>
+            %else:
+                <div>
+                    ${_('To set a new password, click the following link')}:
+                    <br/>
+                    <a style="${link_style}" href="${reset_url}"
+                        target="_blank">${reset_url}</a>
+                    <br/>
+                    ${_("Should you not be able to use the link above, please type the following code into the password reset form")}:
+                    <code>${reset_token}</code>
+                </div>
+            %endif
+        </td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px"></td>
+    </tr>
+    <tr>
+        <td>
+            ${_("If it weren't you who requested the password reset, just disregard this message.")}
+        </td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/password_reset.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,21 @@
+<%block name="header">\
+<% title = _('Password Reset Request') %>\
+<%include file="header.txt" args="title=title,link=None"/>\
+</%block>\
+\
+${_('Hello %s') % user|n},
+
+${_('We have received a request to reset the password for your account.')|n}
+
+%if reset_token is None:
+${_('This account is however managed outside this system and the password cannot be changed here.')|n}
+%else:
+${_('To set a new password, click the following link')|n}:
+
+${reset_url|n}
+
+${_("Should you not be able to use the link above, please type the following code into the password reset form")|n}:
+${reset_token|n}
+%endif
+
+${_("If it weren't you who requested the password reset, just disregard this message.")|n}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/pull_request.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,89 @@
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('Mention on Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) if is_mention else _('Added as Reviewer of Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) %>\
+<%include file="header.html" args="title=title,link=pr_url"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+            <div>
+                ${_('Pull request')}
+                <a style="${link_style}"
+                   href="${pr_url}">${pr_nice_id} "${pr_title}"</a>
+                ${_('by')}
+                <span style="${data_style}">${pr_owner.full_name_and_username}</span>.
+            </div>
+            <div>
+                ${_('from')}
+                <a style="${link_text_style}"
+                   href="${pr_source_repo}">${pr_source_repo}</a>
+                ${_('branch')}
+                <span style="${data_style}">${pr_source_branch}</span>
+                <br/>
+                ${_('to')}
+                <a style="${link_text_style}"
+                   href="${pr_target_repo}">${pr_target_repo}</a>
+                ${_('branch')}
+                <span style="${data_style}">${pr_target_branch}</span>
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <div>
+                ${_('Description')}:
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="${color_background_grey}" style="border:1px solid ${color_border};border-radius:4px">
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td width="20px" style="width:20px"></td>
+                    <td>
+                        <div style="${comment_style}">${body}</div>
+                    </td>
+                    <td width="20px" style="width:20px"></td>
+                </tr>
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+    </tr>
+    <tr><td height="15px" style="height:15px"></td></tr>
+    <tr>
+        <td>
+            <div>${_('Changesets')}:</div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+
+    <tr>
+        <td style="${sans_style}">
+            <ul style="color:${color_link};padding-left:15px;margin:0">
+                %for revision, desc in pr_revisions:
+                    <li style="mso-special-format:bullet">
+                        <a style="${link_style}"
+                           href="${webutils.canonical_url('changeset_home', repo_name=org_repo_name, revision=revision)}">
+                            ${webutils.shorter(desc, 80, firstline=True)}
+                        </a>
+                    </li>
+                %endfor
+            </ul>
+        </td>
+    </tr>
+    <tr>
+        <td>
+<% title = _('View Pull Request') %>\
+<%include file="button.html" args="url=pr_url,title=title,padding_bottom=False"/>\
+        </td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/pull_request.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,33 @@
+<%block name="header">\
+<% title = _('Mention on Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) if is_mention else _('Added as Reviewer of Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) %>\
+<%include file="header.txt" args="title=title,link=pr_url"/>\
+</%block>\
+
+${_('Pull request')|n} \
+${pr_nice_id|n} \
+"${pr_title|n}" \
+${_('by')|n} \
+${pr_owner.full_name_and_username|n}
+${_('from')} \
+${pr_source_repo|n} \
+${_('branch')|n} \
+${pr_source_branch|n}
+${_('to')|n} \
+${pr_target_repo|n} \
+${_('branch')|n} \
+${pr_target_branch|n}
+
+
+${_('Description')|n}:
+
+${body|n}
+
+
+${_('Changesets')|n}:
+
+%for revision, desc in pr_revisions:
+${webutils.shorter(desc, 80, firstline=True)|n}
+%endfor
+
+<% title = _('View Pull Request') %>\
+<%include file="button.txt" args="url=pr_url,title=title"/>\
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/pull_request_comment.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,47 @@
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('Mention in Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) if is_mention else _('Pull Request %s "%s" Closed') % (pr_nice_id, pr_title) if closing_pr else _('Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) %>\
+<%include file="header.html" args="title=title,link=pr_comment_url"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+<%include file="comment.html" args="text=body,author=pr_comment_user,status=status_change,close=closing_pr"/>\
+        </td>
+    </tr>
+    <tr>
+        <td height="30px" style="height:30px"></td>
+    </tr>
+    <tr>
+        <td>
+            <div>
+                ${_('Pull request')}
+                <a style="${link_style}"
+                   href="${pr_url}">${pr_nice_id} "${pr_title}"</a>
+                ${_('by')}
+                <span style="${data_style}">${pr_owner.full_name_and_username}</span>.
+            </div>
+            <div>
+                ${_('from')}
+                <a style="${link_text_style}"
+                   href="${pr_source_repo}">${pr_source_repo}</a>
+                ${_('branch')}
+                <span style="${data_style}">${pr_source_branch}</span>
+                <br/>
+                ${_('to')}
+                <a style="${link_text_style}"
+                   href="${pr_target_repo}">${pr_target_repo}</a>
+                ${_('branch')}
+                <span style="${data_style}">${pr_target_branch}</span>:
+            </div>
+        </td>
+    </tr>
+    <tr>
+        <td>
+<% title = _('View Comment') %>\
+<%include file="button.html" args="url=pr_comment_url,title=title,padding_bottom=False"/>\
+        </td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/pull_request_comment.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,23 @@
+<%block name="header">\
+<% title = _('Mention in Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) if is_mention else _('Pull Request %s "%s" Closed') % (pr_nice_id, pr_title) if closing_pr else _('Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) %>\
+<%include file="header.txt" args="title=title,link=pr_comment_url"/>\
+</%block>\
+
+<%include file="comment.txt" args="text=body,author=pr_comment_user,status=status_change,close=closing_pr"/>\
+
+${_('Pull request')|n} \
+${pr_nice_id|n} \
+"${pr_title|n}" \
+${_('by')|n} \
+${pr_owner.full_name_and_username|n}
+${_('from')} \
+${pr_source_repo|n} \
+${_('branch')|n} \
+${pr_source_branch|n}
+${_('to')|n} \
+${pr_target_repo|n} \
+${_('branch')|n} \
+${pr_target_branch|n}
+
+<% title = _('View Comment') %>\
+<%include file="button.txt" args="url=pr_comment_url,title=title"/>\
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/registration.html	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,46 @@
+## -*- coding: utf-8 -*-
+<%inherit file="main.html"/>\
+\
+<%block name="header">\
+<% title = _('New User Registration') %>\
+<%include file="header.html" args="title=title,link=registered_user_url"/>\
+</%block>\
+\
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+            ${_('Username')}:
+        </td>
+        <td style="${data_style}">
+            ${new_username}
+        </td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px" colspan="2"></td>
+    </tr>
+    <tr>
+        <td>
+            ${_('Full Name')}:
+        </td>
+        <td style="${data_style}">
+            ${new_full_name}
+        </td>
+    </tr>
+    <tr>
+        <td height="10px" style="height:10px" colspan="2"></td>
+    </tr>
+    <tr>
+        <td>
+            ${_('Email')}:
+        </td>
+        <td style="${data_style}">
+            ${new_email}
+        </td>
+    </tr>
+    <tr>
+        <td colspan="2">
+<% title = _('View User Profile') %>\
+<%include file="button.html" args="url=registered_user_url,title=title,padding_bottom=False"/>\
+        </td>
+    </tr>
+</table>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/email/registration.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,13 @@
+<%block name="header">\
+<% title = _('New User Registration') %>\
+<%include file="header.txt" args="title=title,link=registered_user_url"/>\
+</%block>\
+
+${_('Username')|n}: ${new_username|n}
+
+${_('Full Name')|n}: ${new_full_name|n}
+
+${_('Email')|n}: ${new_email|n}
+
+<% title = _('View User Profile') %>\
+<%include file="button.txt" args="url=registered_user_url,title=title"/>\
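One systematic difference between the new templates above and the removed email_templates files below: the old expressions used the filter chain |n,unicode, while the new ones use just |n. The unicode filter coerced output to unicode, which is redundant now that the code base targets Python 3, where every str already is unicode; n disables the default expression filters so the plain-text mails are written verbatim instead of going through whatever default filtering (typically HTML escaping in a web application's template setup) is configured. Below is a minimal, self-contained Mako sketch of the n flag, with the escaping filter configured explicitly rather than assuming Kallithea's own setup (illustrative only, not part of the changeset).

# Illustrative sketch only -- not part of this changeset.
from mako.template import Template

# Configure an HTML-escaping default filter explicitly, standing in for
# whatever Kallithea's template lookup applies by default.
escaped = Template('${greeting}', default_filters=['h'])
raw = Template('${greeting|n}', default_filters=['h'])

print(escaped.render(greeting='<b>hi</b>'))  # &lt;b&gt;hi&lt;/b&gt;
print(raw.render(greeting='<b>hi</b>'))      # <b>hi</b> -- 'n' skips the default filters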
--- a/kallithea/templates/email_templates/button.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-<%page args="url,title='',padding_top=True,padding_bottom=True" />\
-##<!-- button -->
-<center>
-    <table cellspacing="0" cellpadding="0" style="margin-left:auto;margin-right:auto">
-        %if padding_top:
-        <tr>
-            <td height="25px" style="height:25px"></td>
-        </tr>
-        %endif
-        <tr>
-            <td style="border-collapse:collapse;border-radius:2px;text-align:center;display:block;border:solid 1px ${color_button};padding:11px 20px 11px 20px">
-                <a href="${url}" style="text-decoration:none;display:block" target="_blank">
-                    <center>
-                        <font size="3">
-                            <span style="${sans_style};font-weight:700;font-size:15px;line-height:14px;color:${color_button};white-space:nowrap;vertical-align:middle">${_(title)}</span>
-                        </font>
-                    </center>
-                </a>
-            </td>
-        </tr>
-        %if padding_bottom:
-        <tr>
-            <td height="25px" style="height:25px"></td>
-        </tr>
-        %endif
-    </table>
-</center>
-##<!-- /button -->
--- a/kallithea/templates/email_templates/button.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-<%page args="url,title" />\
-
-${title|n,unicode}: ${url}
--- a/kallithea/templates/email_templates/changeset_comment.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,40 +0,0 @@
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('Mention in Comment on Changeset "%s"') % h.shorter(message, 200, firstline=True) if is_mention else _('Comment on Changeset "%s"') % h.shorter(message, 200, firstline=True) %>\
-<%include file="header.html" args="title=title,link=cs_comment_url"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%">
-    <tr>
-        <td>
-<%include file="comment.html" args="text=body,author=cs_comment_user,status=status_change"/>\
-        </td>
-    </tr>
-    <tr>
-        <td height="30px" style="height:30px"></td>
-    </tr>
-    <tr>
-        <td>
-            <div>
-                ${_('Changeset on')}
-                <a style="${link_text_style}"
-                   href="${cs_target_repo}">${cs_target_repo}</a>
-                ${_('branch')}
-                <span style="${data_style}">${branch}</span>:
-            </div>
-            <div>
-                "<a style="${link_style}"
-                   href="${cs_url}">${h.shorter(message, 60, firstline=True)}</a>"
-                ${_('by')}
-                <span style="${data_style}">${cs_author.full_name_and_username}</span>.
-            </div>
-        </td>
-    </tr>
-    <tr>
-        <td>
-<% title = _('View Comment') %>\
-<%include file="button.html" args="url=cs_comment_url,title=title,padding_bottom=False"/>\
-        </td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/changeset_comment.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-<%block name="header">\
-<% title = _('Mention in Comment on Changeset "%s"') % h.shorter(message, 200, firstline=True) if is_mention else _('Comment on Changeset "%s"') % h.shorter(message, 200, firstline=True) %>\
-<%include file="header.txt" args="title=title,link=cs_comment_url"/>\
-</%block>\
-
-<%include file="comment.txt" args="text=body,author=cs_comment_user,status=status_change"/>\
-
-${_('Changeset on')|n,unicode} \
-${cs_target_repo|n,unicode} \
-${_('branch')|n,unicode} \
-${branch|n,unicode}:
-"${h.shorter(message, 60, firstline=True)|n,unicode}" \
-${_('by')|n,unicode} \
-${cs_author.full_name_and_username|n,unicode}.
-
-<% title = _('View Comment') %>\
-<%include file="button.txt" args="url=cs_comment_url,title=title"/>\
--- a/kallithea/templates/email_templates/comment.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-<%page args="author,text,status,close=False" />\
-\
-##<!-- comment ->
-<table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="${color_background_grey}" style="border:1px solid ${color_border};border-radius:4px">
-    <tr>
-        <td height="10px" style="height:10px" colspan="3"></td>
-    </tr>
-    <tr>
-        <td width="20px" style="width:20px"></td>
-        <td>
-            <div style="${emph_style}">${author}</div>
-        </td>
-        <td width="20px" style="width:20px"></td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px" colspan="3" style="border-bottom:1px solid ${color_border}"></td>
-    </tr>
-    %if status or close:
-        <tr>
-            <td height="10px" style="height:10px" colspan="3"></td>
-        </tr>
-        <tr>
-            <td width="20px" style="width:20px"></td>
-            <td>
-                %if status:
-                    <div style="font-weight:600">
-                        ${_('Status change:')}
-                        ${status}
-                    </div>
-                %endif
-                %if close:
-                    <div style="font-weight:600">
-                        ${_('The pull request has been closed.')}
-                    </div>
-                %endif
-            </td>
-            <td width="20px" style="width:20px"></td>
-        </tr>
-        <tr>
-            <td height="10px" style="height:10px" colspan="3" style="border-bottom:1px solid ${color_border}"></td>
-        </tr>
-    %endif
-    <tr>
-        <td height="10px" style="height:10px" colspan="3"></td>
-    </tr>
-    <tr>
-        <td width="20px" style="width:20px"></td>
-        <td>
-            <div style="${comment_style}">${text}</div>
-        </td>
-        <td width="20px" style="width:20px"></td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px" colspan="3"></td>
-    </tr>
-</table>
-##<!-- /comment ->
--- a/kallithea/templates/email_templates/comment.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-<%page args="author,text,status,close=False" />\
-${author|n,unicode}:
-
-%if status:
-${_('Status change:')|n,unicode} \
-${status|n,unicode}
-
-%endif
-%if close:
-${_('The pull request has been closed.')|n,unicode}
-
-%endif
-${text|n,unicode}
-
-## Trailing empty line
--- a/kallithea/templates/email_templates/default.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('Message') %>\
-<%include file="header.html" args="title=title,link=None"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%">
-    <tr>
-        <td style="${comment_style}">${body}</td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/default.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-<%block name="header">\
-</%block>\
-\
-${body|n,unicode}\
--- a/kallithea/templates/email_templates/header.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-<%page args="title,link" />\
-\
-##<!-- header -->
-<table bgcolor="${color_background_grey}" width="100%" cellpadding="0" cellspacing="0"
-       style="border-bottom:1px solid ${color_border}">
-    <tr>
-        <td height="20px" style="height:20px" colspan="3"></td>
-    </tr>
-    <tr>
-        <td width="30px" style="width:30px"></td>
-        <td style="${sans_style};font-size:19px;line-height:24px">
-        %if link is not None:
-            <a style="text-decoration:none;${emph_style}" href="${link}"
-               target="_blank">${title}</a>
-        %else:
-            <span style="${emph_style}">${title}</span>
-        %endif
-        </td>
-        <td width="30px" style="width:30px"></td>
-    </tr>
-    <tr>
-        <td height="20px" style="height:20px" colspan="3"></td>
-    </tr>
-</table>
-##<!-- /header -->
--- a/kallithea/templates/email_templates/header.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-<%page args="title,link" />\
-%if link is not None:
-${link}
-
-%endif
-${title|n,unicode}
-
-## Trailing empty line
--- a/kallithea/templates/email_templates/main.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-<!doctype html>
-<html lang="en">
-<head>
-    <title></title>
-    <meta name="viewport" content="width=device-width">
-    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-</head>
-<body>
-<table align="center" cellpadding="0" cellspacing="0" border="0" style="min-width:348px;max-width:800px;${default_style}">
-    <tr>
-        <td width="30px" style="width:30px"></td>
-        <td>
-            <table width="100%" cellpadding="0" cellspacing="0" border="0"
-                   style="table-layout:fixed;${sans_style};border:1px solid ${color_border}">
-                <tr><td width="30px" style="width:30px"></td><td></td><td width="30px" style="width:30px"></td></tr>
-                <tr>
-                    <td colspan="3">
-<%block name="header"/>\
-                    </td>
-                </tr>
-                <tr>
-                    <td height="30px" style="height:30px" colspan="3"></td>
-                </tr>
-                <tr>
-                    <td></td>
-                    <td>
-                        ##<!-- body -->
-${self.body()}\
-                        ##<!-- /body -->
-                    </td>
-                    <td></td>
-                </tr>
-                <tr>
-                    <td height="30px" style="height:30px" colspan="3"></td>
-                </tr>
-            </table>
-        </td>
-        <td width="30px" style="width:30px"></td>
-    </tr>
-</table>
-</body>
-</html>
--- a/kallithea/templates/email_templates/password_reset.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,48 +0,0 @@
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('Password Reset Request') %>\
-<%include file="header.html" args="title=title,link=None"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%" style="table-layout:fixed;word-wrap:break-word;">
-    <tr>
-        <td>${_('Hello %s') % user},</td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px"></td>
-    </tr>
-    <tr>
-        <td>
-            ${_('We have received a request to reset the password for your account.')}
-        </td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px"></td>
-    </tr>
-    <tr>
-        <td>
-            %if reset_token is None:
-                <div>${_('This account is however managed outside this system and the password cannot be changed here.')}</div>
-            %else:
-                <div>
-                    ${_('To set a new password, click the following link')}:
-                    <br/>
-                    <a style="${link_style}" href="${reset_url}"
-                        target="_blank">${reset_url}</a>
-                    <br/>
-                    ${_("Should you not be able to use the link above, please type the following code into the password reset form")}:
-                    <code>${reset_token}</code>
-                </div>
-            %endif
-        </td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px"></td>
-    </tr>
-    <tr>
-        <td>
-            ${_("If it weren't you who requested the password reset, just disregard this message.")}
-        </td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/password_reset.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-<%block name="header">\
-<% title = _('Password Reset Request') %>\
-<%include file="header.txt" args="title=title,link=None"/>\
-</%block>\
-\
-${_('Hello %s') % user|n,unicode},
-
-${_('We have received a request to reset the password for your account.')|n,unicode}
-
-%if reset_token is None:
-${_('This account is however managed outside this system and the password cannot be changed here.')|n,unicode}
-%else:
-${_('To set a new password, click the following link')|n,unicode}:
-
-${reset_url|n,unicode}
-
-${_("Should you not be able to use the link above, please type the following code into the password reset form")|n,unicode}:
-${reset_token|n,unicode}
-%endif
-
-${_("If it weren't you who requested the password reset, just disregard this message.")|n,unicode}
--- a/kallithea/templates/email_templates/pull_request.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,89 +0,0 @@
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('Mention on Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) if is_mention else _('Added as Reviewer of Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) %>\
-<%include file="header.html" args="title=title,link=pr_url"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%">
-    <tr>
-        <td>
-            <div>
-                ${_('Pull request')}
-                <a style="${link_style}"
-                   href="${pr_url}">${pr_nice_id} "${pr_title}"</a>
-                ${_('by')}
-                <span style="${data_style}">${pr_owner.full_name_and_username}</span>.
-            </div>
-            <div>
-                ${_('from')}
-                <a style="${link_text_style}"
-                   href="${pr_source_repo}">${pr_source_repo}</a>
-                ${_('branch')}
-                <span style="${data_style}">${pr_source_branch}</span>
-                <br/>
-                ${_('to')}
-                <a style="${link_text_style}"
-                   href="${pr_target_repo}">${pr_target_repo}</a>
-                ${_('branch')}
-                <span style="${data_style}">${pr_target_branch}</span>
-            </div>
-        </td>
-    </tr>
-    <tr><td height="10px" style="height:10px"></td></tr>
-    <tr>
-        <td>
-            <div>
-                ${_('Description')}:
-            </div>
-        </td>
-    </tr>
-    <tr><td height="10px" style="height:10px"></td></tr>
-    <tr>
-        <td>
-            <table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="${color_background_grey}" style="border:1px solid ${color_border};border-radius:4px">
-                <tr>
-                    <td height="10px" style="height:10px" colspan="3"></td>
-                </tr>
-                <tr>
-                    <td width="20px" style="width:20px"></td>
-                    <td>
-                        <div style="${comment_style}">${body}</div>
-                    </td>
-                    <td width="20px" style="width:20px"></td>
-                </tr>
-                <tr>
-                    <td height="10px" style="height:10px" colspan="3"></td>
-                </tr>
-            </table>
-        </td>
-    </tr>
-    <tr><td height="15px" style="height:15px"></td></tr>
-    <tr>
-        <td>
-            <div>${_('Changesets')}:</div>
-        </td>
-    </tr>
-    <tr><td height="10px" style="height:10px"></td></tr>
-
-    <tr>
-        <td style="${sans_style}">
-            <ul style="color:${color_link};padding-left:15px;margin:0">
-                %for revision, desc in pr_revisions:
-                    <li style="mso-special-format:bullet">
-                        <a style="${link_style}"
-                           href="${h.canonical_url('changeset_home', repo_name=org_repo_name, revision=revision)}">
-                            ${h.shorter(desc, 80, firstline=True)}
-                        </a>
-                    </li>
-                %endfor
-            </ul>
-        </td>
-    </tr>
-    <tr>
-        <td>
-<% title = _('View Pull Request') %>\
-<%include file="button.html" args="url=pr_url,title=title,padding_bottom=False"/>\
-        </td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/pull_request.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,33 +0,0 @@
-<%block name="header">\
-<% title = _('Mention on Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) if is_mention else _('Added as Reviewer of Pull Request %s "%s" by %s') % (pr_nice_id, pr_title, pr_user_created) %>\
-<%include file="header.txt" args="title=title,link=pr_url"/>\
-</%block>\
-
-${_('Pull request')|n,unicode} \
-${pr_nice_id|n,unicode} \
-"${pr_title|n,unicode}" \
-${_('by')|n,unicode} \
-${pr_owner.full_name_and_username|n,unicode}
-${_('from')} \
-${pr_source_repo|n,unicode} \
-${_('branch')|n,unicode} \
-${pr_source_branch|n,unicode}
-${_('to')|n,unicode} \
-${pr_target_repo|n,unicode} \
-${_('branch')|n,unicode} \
-${pr_target_branch|n,unicode}
-
-
-${_('Description')|n,unicode}:
-
-${body|n,unicode}
-
-
-${_('Changesets')|n,unicode}:
-
-%for revision, desc in pr_revisions:
-${h.shorter(desc, 80, firstline=True)|n,unicode}
-%endfor
-
-<% title = _('View Pull Request') %>\
-<%include file="button.txt" args="url=pr_url,title=title"/>\
--- a/kallithea/templates/email_templates/pull_request_comment.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,47 +0,0 @@
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('Mention in Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) if is_mention else _('Pull Request %s "%s" Closed') % (pr_nice_id, pr_title) if closing_pr else _('Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) %>\
-<%include file="header.html" args="title=title,link=pr_comment_url"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%">
-    <tr>
-        <td>
-<%include file="comment.html" args="text=body,author=pr_comment_user,status=status_change,close=closing_pr"/>\
-        </td>
-    </tr>
-    <tr>
-        <td height="30px" style="height:30px"></td>
-    </tr>
-    <tr>
-        <td>
-            <div>
-                ${_('Pull request')}
-                <a style="${link_style}"
-                   href="${pr_url}">${pr_nice_id} "${pr_title}"</a>
-                ${_('by')}
-                <span style="${data_style}">${pr_owner.full_name_and_username}</span>.
-            </div>
-            <div>
-                ${_('from')}
-                <a style="${link_text_style}"
-                   href="${pr_source_repo}">${pr_source_repo}</a>
-                ${_('branch')}
-                <span style="${data_style}">${pr_source_branch}</span>
-                <br/>
-                ${_('to')}
-                <a style="${link_text_style}"
-                   href="${pr_target_repo}">${pr_target_repo}</a>
-                ${_('branch')}
-                <span style="${data_style}">${pr_target_branch}</span>:
-            </div>
-        </td>
-    </tr>
-    <tr>
-        <td>
-<% title = _('View Comment') %>\
-<%include file="button.html" args="url=pr_comment_url,title=title,padding_bottom=False"/>\
-        </td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/pull_request_comment.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-<%block name="header">\
-<% title = _('Mention in Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) if is_mention else _('Pull Request %s "%s" Closed') % (pr_nice_id, pr_title) if closing_pr else _('Comment on Pull Request %s "%s"') % (pr_nice_id, pr_title) %>\
-<%include file="header.txt" args="title=title,link=pr_comment_url"/>\
-</%block>\
-
-<%include file="comment.txt" args="text=body,author=pr_comment_user,status=status_change,close=closing_pr"/>\
-
-${_('Pull request')|n,unicode} \
-${pr_nice_id|n,unicode} \
-"${pr_title|n,unicode}" \
-${_('by')|n,unicode} \
-${pr_owner.full_name_and_username|n,unicode}
-${_('from')} \
-${pr_source_repo|n,unicode} \
-${_('branch')|n,unicode} \
-${pr_source_branch|n,unicode}
-${_('to')|n,unicode} \
-${pr_target_repo|n,unicode} \
-${_('branch')|n,unicode} \
-${pr_target_branch|n,unicode}
-
-<% title = _('View Comment') %>\
-<%include file="button.txt" args="url=pr_comment_url,title=title"/>\
--- a/kallithea/templates/email_templates/registration.html	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,46 +0,0 @@
-## -*- coding: utf-8 -*-
-<%inherit file="main.html"/>\
-\
-<%block name="header">\
-<% title = _('New User Registration') %>\
-<%include file="header.html" args="title=title,link=registered_user_url"/>\
-</%block>\
-\
-<table cellpadding="0" cellspacing="0" border="0" width="100%">
-    <tr>
-        <td>
-            ${_('Username')}:
-        </td>
-        <td style="${data_style}">
-            ${new_username}
-        </td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px" colspan="2"></td>
-    </tr>
-    <tr>
-        <td>
-            ${_('Full Name')}:
-        </td>
-        <td style="${data_style}">
-            ${new_full_name}
-        </td>
-    </tr>
-    <tr>
-        <td height="10px" style="height:10px" colspan="2"></td>
-    </tr>
-    <tr>
-        <td>
-            ${_('Email')}:
-        </td>
-        <td style="${data_style}">
-            ${new_email}
-        </td>
-    </tr>
-    <tr>
-        <td colspan="2">
-<% title = _('View User Profile') %>\
-<%include file="button.html" args="url=registered_user_url,title=title,padding_bottom=False"/>\
-        </td>
-    </tr>
-</table>
--- a/kallithea/templates/email_templates/registration.txt	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-<%block name="header">\
-<% title = _('New User Registration') %>\
-<%include file="header.txt" args="title=title,link=registered_user_url"/>\
-</%block>\
-
-${_('Username')|n,unicode}: ${new_username|n,unicode}
-
-${_('Full Name')|n,unicode}: ${new_full_name|n,unicode}
-
-${_('Email')|n,unicode}: ${new_email|n,unicode}
-
-<% title = _('View User Profile') %>\
-<%include file="button.txt" args="url=registered_user_url,title=title"/>\
--- a/kallithea/templates/files/diff_2way.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/files/diff_2way.html	Thu May 27 21:27:37 2021 +0200
@@ -48,10 +48,10 @@
                           <i class="icon-docs"></i></a>
                       <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='raw')}"
                          data-toggle="tooltip"
-                         title="${_('Raw diff')}"><i class="icon-diff"></i></a>
+                         title="${_('Raw diff for this file')}"><i class="icon-diff"></i></a>
                       <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=c.node1.path,diff2=c.cs2.raw_id,diff1=c.cs1.raw_id,diff='download')}"
                          data-toggle="tooltip"
-                         title="${_('Download diff')}"><i class="icon-floppy"></i></a>
+                         title="${_('Download diff for this file')}"><i class="icon-floppy"></i></a>
                       ${h.checkbox('ignorews', label=_('Ignore whitespace'))}
                       ${h.checkbox('edit_mode', label=_('Edit'))}
                     </div>
@@ -60,7 +60,8 @@
         </div>
     </div>
 
-<script>'use strict';
+<script>
+'use strict';
 var orig1_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=c.node1.path,revision=c.cs1.raw_id))};
 var orig2_url = ${h.jshtml(h.url('files_raw_home',repo_name=c.repo_name,f_path=c.node2.path,revision=c.cs2.raw_id))};
 
--- a/kallithea/templates/files/files.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/files/files.html	Thu May 27 21:27:37 2021 +0200
@@ -36,7 +36,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+'use strict';
 var CACHE = {};
 var CACHE_EXPIRE = 5*60*1000; //cache for 5*60s
 //used to construct links from the search list
--- a/kallithea/templates/files/files_add.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/files/files_add.html	Thu May 27 21:27:37 2021 +0200
@@ -70,7 +70,8 @@
               </div>
             </div>
             ${h.end_form()}
-            <script>'use strict';
+            <script>
+                'use strict';
                 $(document).ready(function(){
                     var reset_url = ${h.jshtml(h.url('files_home',repo_name=c.repo_name,revision=c.cs.raw_id,f_path=c.f_path))};
                     var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, reset_url);
--- a/kallithea/templates/files/files_browser.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/files/files_browser.html	Thu May 27 21:27:37 2021 +0200
@@ -90,14 +90,15 @@
                      <td>
                          %if node.is_file():
                              <span data-toggle="tooltip" title="${h.fmt_date(node.last_changeset.date)}">
-                            ${h.age(node.last_changeset.date)}</span>
+                               ${h.age(node.last_changeset.date)}
+                             </span>
                          %endif
                      </td>
                      <td>
                          %if node.is_file():
                              <span title="${node.last_changeset.author}">
-                            ${h.person(node.last_changeset.author)}
-                            </span>
+                               ${h.person(node.last_changeset.author)}
+                             </span>
                          %endif
                      </td>
                 </tr>
@@ -109,7 +110,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         // init node filter if we pass GET param ?search=1
         var search_GET = ${h.js(request.GET.get('search',''))};
--- a/kallithea/templates/files/files_edit.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/files/files_edit.html	Thu May 27 21:27:37 2021 +0200
@@ -77,7 +77,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         var reset_url = ${h.jshtml(h.url('files_home',repo_name=c.repo_name,revision=c.cs.raw_id,f_path=c.file.path))};
         var myCodeMirror = initCodeMirror('editor', ${h.jshtml(request.script_name)}, reset_url);
--- a/kallithea/templates/followers/followers.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/followers/followers.html	Thu May 27 21:27:37 2021 +0200
@@ -25,7 +25,8 @@
         </div>
     </div>
 </div>
-<script>'use strict';
+<script>
+  'use strict';
   $(document).ready(function(){
     var $followers = $('#followers');
     $followers.on('click','.pager_link',function(e){
--- a/kallithea/templates/forks/fork.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/forks/fork.html	Thu May 27 21:27:37 2021 +0200
@@ -88,7 +88,8 @@
     </div>
     ${h.end_form()}
 </div>
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         $("#repo_group").select2({
             'dropdownAutoWidth': true
--- a/kallithea/templates/forks/forks.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/forks/forks.html	Thu May 27 21:27:37 2021 +0200
@@ -25,7 +25,8 @@
         </div>
     </div>
 </div>
-<script>'use strict';
+<script>
+  'use strict';
   $(document).ready(function(){
       var $forks = $('#forks');
       $forks.on('click','.pager_link',function(e){
--- a/kallithea/templates/index_base.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/index_base.html	Thu May 27 21:27:37 2021 +0200
@@ -16,11 +16,10 @@
                 <%
                     gr_name = c.group.group_name if c.group else None
                     # create repositories with write permission on group is set to true
-                    create_on_write = h.HasPermissionAny('hg.create.write_on_repogroup.true')()
                     group_admin = h.HasRepoGroupPermissionLevel('admin')(gr_name, 'can write into group index page')
                     group_write = h.HasRepoGroupPermissionLevel('write')(gr_name, 'can write into group index page')
                 %>
-                %if h.HasPermissionAny('hg.admin','hg.create.repository')() or (group_admin or (group_write and create_on_write)):
+                %if h.HasPermissionAny('hg.admin','hg.create.repository')() or group_admin or group_write:
                   %if c.group:
                         <a href="${h.url('new_repo',parent_group=c.group.group_id)}" class="btn btn-default btn-xs"><i class="icon-plus"></i>${_('Add Repository')}</a>
                         %if h.HasPermissionAny('hg.admin')() or h.HasRepoGroupPermissionLevel('admin')(c.group.group_name):
@@ -44,7 +43,8 @@
         </div>
     </div>
 
-      <script>'use strict';
+      <script>
+        'use strict';
         var data = ${h.js(c.data)};
         $("#repos_list_wrap").DataTable({
                 data: data.records,
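The first index_base.html hunk above also changes behaviour slightly: the hg.create.write_on_repogroup.true check (create_on_write) is dropped, so write permission on a repository group is now enough by itself to show the 'Add Repository' button. A plain-boolean sketch of the old versus new condition follows (illustrative only, not part of the changeset; booleans stand in for Kallithea's permission helpers).

# Illustrative sketch only -- not part of this changeset.
def old_condition(has_global_create, group_admin, group_write, create_on_write):
    # h.HasPermissionAny('hg.admin', 'hg.create.repository')() or (group_admin
    # or (group_write and create_on_write)), per the removed template expression
    return has_global_create or (group_admin or (group_write and create_on_write))

def new_condition(has_global_create, group_admin, group_write):
    return has_global_create or group_admin or group_write

# The two only differ for a user whose sole relevant permission is write access
# on the group, without hg.create.write_on_repogroup.true:
assert old_condition(False, False, True, create_on_write=False) is False
assert new_condition(False, False, True) is True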
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/ini/template.ini.mako	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,597 @@
+## -*- coding: utf-8 -*-
+<%text>##</%text>#################################################################################
+<%text>##</%text>#################################################################################
+<%text>##</%text> Kallithea config file generated with kallithea-cli ${'%-27s' % version       }##
+<%text>##</%text>                                                                               ##
+<%text>##</%text> The %(here)s variable will generally be replaced with the parent directory of ##
+<%text>##</%text> this file. Other use of % must be escaped as %% .                             ##
+<%text>##</%text>#################################################################################
+<%text>##</%text>#################################################################################
+
+[DEFAULT]
+
+<%text>##</%text>##############################################################################
+<%text>##</%text> Email settings                                                             ##
+<%text>##</%text>                                                                            ##
+<%text>##</%text> Refer to the documentation ("Email settings") for more details.            ##
+<%text>##</%text>                                                                            ##
+<%text>##</%text> It is recommended to use a valid sender address that passes access         ##
+<%text>##</%text> validation and spam filtering in mail servers.                             ##
+<%text>##</%text>##############################################################################
+
+<%text>##</%text> 'From' header for application emails. You can optionally add a name.
+<%text>##</%text> Default:
+#app_email_from = Kallithea
+<%text>##</%text> Examples:
+#app_email_from = Kallithea <kallithea-noreply@example.com>
+#app_email_from = kallithea-noreply@example.com
+
+<%text>##</%text> Subject prefix for application emails.
+<%text>##</%text> A space between this prefix and the real subject is automatically added.
+<%text>##</%text> Default:
+#email_prefix =
+<%text>##</%text> Example:
+#email_prefix = [Kallithea]
+
+<%text>##</%text> Recipients for error emails and fallback recipients of application mails.
+<%text>##</%text> Multiple addresses can be specified, comma-separated.
+<%text>##</%text> Only addresses are allowed, do not add any name part.
+<%text>##</%text> Default:
+#email_to =
+<%text>##</%text> Examples:
+#email_to = admin@example.com
+#email_to = admin@example.com,another_admin@example.com
+email_to =
+
+<%text>##</%text> 'From' header for error emails. You can optionally add a name.
+<%text>##</%text> Default: (none)
+<%text>##</%text> Examples:
+#error_email_from = Kallithea Errors <kallithea-noreply@example.com>
+#error_email_from = kallithea_errors@example.com
+error_email_from =
+
+<%text>##</%text> SMTP server settings
+<%text>##</%text> If specifying credentials, make sure to use secure connections.
+<%text>##</%text> Default: Send unencrypted unauthenticated mails to the specified smtp_server.
+<%text>##</%text> For "SSL", use smtp_use_ssl = true and smtp_port = 465.
+<%text>##</%text> For "STARTTLS", use smtp_use_tls = true and smtp_port = 587.
+smtp_server =
+smtp_username =
+smtp_password =
+smtp_port =
+smtp_use_ssl = false
+smtp_use_tls = false
+
+%if http_server != 'uwsgi':
+<%text>##</%text> Entry point for 'gearbox serve'
+[server:main]
+host = ${host}
+port = ${port}
+
+%if http_server == 'gearbox':
+<%text>##</%text> Gearbox serve uses the built-in development web server ##
+use = egg:gearbox#wsgiref
+<%text>##</%text> number of worker threads to spawn
+threadpool_workers = 1
+<%text>##</%text> max requests before thread respawn
+threadpool_max_requests = 100
+<%text>##</%text> option to use a thread pool instead of separate processes
+use_threadpool = true
+
+%elif http_server == 'gevent':
+<%text>##</%text> Gearbox serve uses the gevent web server ##
+use = egg:gearbox#gevent
+
+%elif http_server == 'waitress':
+<%text>##</%text> Gearbox serve uses the Waitress web server ##
+use = egg:waitress#main
+<%text>##</%text> avoid multi threading
+threads = 1
+<%text>##</%text> allow push of repos bigger than the default of 1 GB
+max_request_body_size = 107374182400
+<%text>##</%text> use poll instead of select; fixes fd limits, but may not work on old
+<%text>##</%text> Windows systems.
+#asyncore_use_poll = True
+
+%elif http_server == 'gunicorn':
+<%text>##</%text> Gearbox serve uses the Gunicorn web server ##
+use = egg:gunicorn#main
+<%text>##</%text> number of process workers. You must set `instance_id = *` when this option
+<%text>##</%text> is set to more than one worker
+workers = 4
+<%text>##</%text> process name
+proc_name = kallithea
+<%text>##</%text> type of worker class, one of sync, eventlet, gevent, tornado
+<%text>##</%text> for bigger setups, a worker class other than sync is recommended
+worker_class = sync
+max_requests = 1000
+<%text>##</%text> amount of time a worker can handle a request before it gets killed and
+<%text>##</%text> restarted
+timeout = 3600
+
+%endif
+%else:
+<%text>##</%text> UWSGI ##
+[uwsgi]
+<%text>##</%text> Note: this section is parsed by the uWSGI .ini parser when run as:
+<%text>##</%text> uwsgi --venv /srv/kallithea/venv --ini-paste-logged my.ini
+<%text>##</%text> Note: in uWSGI 2.0.18 or older, pastescript needs to be installed to
+<%text>##</%text> get correct application logging. In later versions this is not necessary.
+<%text>##</%text> pip install pastescript
+
+<%text>##</%text> HTTP Basics:
+http-socket = ${host}:${port}
+buffer-size = 65535                    ; Mercurial will use huge GET headers for discovery
+
+<%text>##</%text> Scaling:
+master = true                          ; Use separate master and worker processes
+auto-procname = true                   ; Name worker processes accordingly
+lazy = true                            ; App *must* be loaded in workers - db connections can't be shared
+workers = 4                            ; On demand scaling up to this many worker processes
+cheaper = 1                            ; Initial and on demand scaling down to this many worker processes
+max-requests = 1000                    ; Graceful reload of worker processes to avoid leaks
+
+<%text>##</%text> Tweak defaults:
+strict = true                          ; Fail on unknown config directives
+enable-threads = true                  ; Enable Python threads (not threaded workers)
+vacuum = true                          ; Delete sockets during shutdown
+single-interpreter = true
+die-on-term = true                     ; Shutdown when receiving SIGTERM (default is respawn)
+need-app = true                        ; Exit early if no app can be loaded.
+reload-on-exception = true             ; Don't assume that the application worker can process more requests after a severe error
+
+%endif
+<%text>##</%text> middleware for hosting the WSGI application under a URL prefix
+#[filter:proxy-prefix]
+#use = egg:PasteDeploy#prefix
+#prefix = /<your-prefix>
+#translate_forwarded_server = False
+
+[app:main]
+use = egg:kallithea
+<%text>##</%text> enable proxy prefix middleware
+#filter-with = proxy-prefix
+
+full_stack = true
+static_files = true
+
+<%text>##</%text> Internationalization (see setup documentation for details)
+<%text>##</%text> By default, the languages requested by the browser are used if available, with English as default.
+<%text>##</%text> Set i18n.enabled=false to disable automatic language choice.
+#i18n.enabled = true
+<%text>##</%text> To force a language, set i18n.enabled=false and specify the language in i18n.lang.
+<%text>##</%text> Valid values are the names of subdirectories in kallithea/i18n with a LC_MESSAGES/kallithea.mo
+#i18n.lang = en
+
+cache_dir = %(here)s/data
+index_dir = %(here)s/data/index
+
+<%text>##</%text> path used for the archive download cache
+archive_cache_dir = %(here)s/data/tarballcache
+
+<%text>##</%text> change this to a unique ID for security
+app_instance_uuid = ${uuid()}
+
+<%text>##</%text> cut off limit for large diffs (size in bytes)
+cut_off_limit = 256000
+
+<%text>##</%text> WSGI environment variable to get the IP address of the client (default REMOTE_ADDR)
+#remote_addr_variable = HTTP_X_FORWARDED_FOR
+
+<%text>##</%text> WSGI environment variable to get the protocol (http or https) of the client connection (default wsgi.url_scheme)
+#url_scheme_variable = HTTP_X_FORWARDED_PROTO
+
+<%text>##</%text> always pretend the client connected using HTTPS (default false)
+#force_https = true
+
+<%text>##</%text> use Strict-Transport-Security headers (default false)
+#use_htsts = true
+
+<%text>##</%text> number of commits that statistics will parse on each iteration
+commit_parse_limit = 25
+
+<%text>##</%text> Path to Python executable to be used for git hooks.
+<%text>##</%text> This value will be written inside the git hook scripts as the text
+<%text>##</%text> after '#!' (shebang). When empty or not defined, the value of
+<%text>##</%text> 'sys.executable' at the time of installation of the git hooks is
+<%text>##</%text> used, which is correct in many cases but for example not when using uwsgi.
+<%text>##</%text> If you change this setting, you should reinstall the Git hooks via
+<%text>##</%text> Admin > Settings > Remap and Rescan.
+#git_hook_interpreter = /srv/kallithea/venv/bin/python3
+%if git_hook_interpreter:
+git_hook_interpreter = ${git_hook_interpreter}
+%endif
+
+<%text>##</%text> path to git executable
+git_path = git
+
+<%text>##</%text> git rev filter option; --all is the default filter. To hide refs other than
+<%text>##</%text> branches and tags in the changelog, switch this to --branches --tags
+#git_rev_filter = --branches --tags
+
+<%text>##</%text> RSS feed options
+rss_cut_off_limit = 256000
+rss_items_per_page = 10
+rss_include_diff = false
+
+<%text>##</%text> options for showing and identifying changesets
+show_sha_length = 12
+show_revision_number = false
+
+<%text>##</%text> Canonical URL to use when creating full URLs in UI and texts.
+<%text>##</%text> Useful when the site is available under different names or protocols.
+<%text>##</%text> Defaults to what is provided in the WSGI environment.
+#canonical_url = https://kallithea.example.com/repos
+
+<%text>##</%text> gist URL alias, used to create nicer URLs for gists. This should be a
+<%text>##</%text> URL that rewrites to _admin/gists/<gistid>.
+<%text>##</%text> Example: http://gist.example.com/{gistid}. Empty means use the internal
+<%text>##</%text> Kallithea URL, i.e. http[s]://kallithea.example.com/_admin/gists/<gistid>
+gist_alias_url =
+
+<%text>##</%text> default encoding used to convert from and to unicode
+<%text>##</%text> can also be a comma-separated list of encodings in case of mixed encodings
+default_encoding = utf-8
+
+<%text>##</%text> Set Mercurial encoding, similar to setting HGENCODING before launching Kallithea
+hgencoding = utf-8
+
+<%text>##</%text> issue tracker for Kallithea (leave blank to disable, absent for default)
+#bugtracker = https://bitbucket.org/conservancy/kallithea/issues
+
+<%text>##</%text> issue tracking mapping for commit messages, comments, PR descriptions, ...
+<%text>##</%text> Refer to the documentation ("Integration with issue trackers") for more details.
+
+<%text>##</%text> regular expression to match issue references
+<%text>##</%text> This pattern may/should contain parenthesized groups that can
+<%text>##</%text> be referred to in issue_server_link or issue_sub using Python backreferences
+<%text>##</%text> (e.g. \1, \2, ...). You can also create named groups with '(?P<groupname>)'.
+<%text>##</%text> To require mandatory whitespace before the issue pattern, use:
+<%text>##</%text> (?:^|(?<=\s)) before the actual pattern, and for mandatory whitespace
+<%text>##</%text> behind the issue pattern, use (?:$|(?=\s)) after the actual pattern.
+
+issue_pat = #(\d+)
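+<%text>##</%text> For example (illustrative only, not a default): the same pattern requiring
+<%text>##</%text> mandatory whitespace around the reference, built as described above:
+#issue_pat = (?:^|(?<=\s))#(\d+)(?:$|(?=\s))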
+
+<%text>##</%text> server URL to the issue
+<%text>##</%text> This pattern may/should contain backreferences to parenthesized groups in issue_pat.
+<%text>##</%text> A backreference can be \1, \2, ... or \g<groupname> if you specified a named group
+<%text>##</%text> called 'groupname' in issue_pat.
+<%text>##</%text> The special token {repo} is replaced with the full repository name
+<%text>##</%text> including repository groups, while {repo_name} is replaced with just
+<%text>##</%text> the name of the repository.
+
+issue_server_link = https://issues.example.com/{repo}/issue/\1
+
+<%text>##</%text> substitution pattern to use as the link text
+<%text>##</%text> If issue_sub is empty, the text matched by issue_pat is retained verbatim
+<%text>##</%text> for the link text. Otherwise, the link text is that of issue_sub, with any
+<%text>##</%text> backreferences to groups in issue_pat replaced.
+
+issue_sub =
+
+<%text>##</%text> issue_pat, issue_server_link and issue_sub can have suffixes to specify
+<%text>##</%text> multiple patterns, e.g. for other issue servers, a wiki, or other targets.
+<%text>##</%text> Below is an example of how to create a wiki pattern:
+<%text>##</%text> wiki-some-id -> https://wiki.example.com/some-id
+
+#issue_pat_wiki = wiki-(\S+)
+#issue_server_link_wiki = https://wiki.example.com/\1
+#issue_sub_wiki = WIKI-\1
+
+<%text>##</%text> alternative HTTP response code returned for failed authentication. The default
+<%text>##</%text> response is 401 HTTPUnauthorized. Currently, Mercurial clients have trouble
+<%text>##</%text> handling that; set this variable to 403 to return HTTPForbidden instead.
+auth_ret_code =
+
+<%text>##</%text> allows changing the repository location on the settings page
+allow_repo_location_change = True
+
+<%text>##</%text> allows setting up custom hooks on the settings page
+allow_custom_hooks_settings = True
+
+<%text>##</%text> extra extensions for indexing, space separated and without the leading '.'.
+#index.extensions =
+#    gemfile
+#    lock
+
+<%text>##</%text> extra filenames for indexing, space separated
+#index.filenames =
+#    .dockerignore
+#    .editorconfig
+#    INSTALL
+#    CHANGELOG
+
+<%text>##</%text>##################################
+<%text>##</%text>            SSH CONFIG          ##
+<%text>##</%text>##################################
+
+<%text>##</%text> SSH is disabled by default, until an Administrator decides to enable it.
+ssh_enabled = false
+
+<%text>##</%text> File where users' SSH keys will be stored *if* ssh_enabled is true.
+#ssh_authorized_keys = /home/kallithea/.ssh/authorized_keys
+%if user_home_path:
+ssh_authorized_keys = ${user_home_path}/.ssh/authorized_keys
+%endif
+
+<%text>##</%text> Path to be used in ssh_authorized_keys file to invoke kallithea-cli with ssh-serve.
+#kallithea_cli_path = /srv/kallithea/venv/bin/kallithea-cli
+%if kallithea_cli_path:
+kallithea_cli_path = ${kallithea_cli_path}
+%endif
+
+<%text>##</%text> Locale to be used in the ssh-serve command.
+<%text>##</%text> This is needed because an SSH client may try to use its own locale
+<%text>##</%text> settings, which may not be available on the server.
+<%text>##</%text> See `locale -a` for valid values on this system.
+#ssh_locale = C.UTF-8
+%if ssh_locale:
+ssh_locale = ${ssh_locale}
+%endif
+
+<%text>##</%text>##################################
+<%text>##</%text>         CELERY CONFIG          ##
+<%text>##</%text>##################################
+
+<%text>##</%text> Note: Celery doesn't support Windows.
+use_celery = false
+
+<%text>##</%text> Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.
+
+<%text>##</%text> Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':
+celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
+
+celery.worker_concurrency = 2
+celery.worker_max_tasks_per_child = 100
+
+<%text>##</%text>##################################
+<%text>##</%text>          BEAKER CACHE          ##
+<%text>##</%text>##################################
+
+beaker.cache.data_dir = %(here)s/data/cache/data
+beaker.cache.lock_dir = %(here)s/data/cache/lock
+
+beaker.cache.regions = long_term,long_term_file
+
+beaker.cache.long_term.type = memory
+beaker.cache.long_term.expire = 36000
+beaker.cache.long_term.key_length = 256
+
+beaker.cache.long_term_file.type = file
+beaker.cache.long_term_file.expire = 604800
+beaker.cache.long_term_file.key_length = 256
+
+<%text>##</%text>##################################
+<%text>##</%text>        BEAKER SESSION          ##
+<%text>##</%text>##################################
+
+<%text>##</%text> Name of session cookie. Should be unique for a given host and path, even when running
+<%text>##</%text> on different ports. Otherwise, cookie sessions will be shared and messed up.
+session.key = kallithea
+<%text>##</%text> Sessions should always only be accessible by the browser, not directly by JavaScript.
+session.httponly = true
+<%text>##</%text> Session lifetime. 2592000 seconds is 30 days.
+session.timeout = 2592000
+
+<%text>##</%text> Server secret used with HMAC to ensure integrity of cookies.
+session.secret = ${uuid()}
+<%text>##</%text> Further, encrypt the data with AES.
+#session.encrypt_key = <key_for_encryption>
+#session.validate_key = <validation_key>
+
+<%text>##</%text> Type of storage used for the session, current types are
+<%text>##</%text> dbm, file, memcached, database, and memory.
+
+<%text>##</%text> File system storage of session data. (default)
+#session.type = file
+
+<%text>##</%text> Cookie only, store all session data inside the cookie. Requires secure secrets.
+#session.type = cookie
+
+<%text>##</%text> Database storage of session data.
+#session.type = ext:database
+#session.sa.url = postgresql://postgres:qwe@localhost/kallithea
+#session.table_name = db_session
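+
+<%text>##</%text> Memcached storage of session data (illustrative example; requires a
+<%text>##</%text> memcached server and a client library supported by Beaker).
+#session.type = ext:memcached
+#session.url = 127.0.0.1:11211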
+
+<%text>##</%text>##################################
+<%text>##</%text>        ERROR HANDLING          ##
+<%text>##</%text>##################################
+
+<%text>##</%text> Show a nice error page for application HTTP errors and exceptions (default true)
+#errorpage.enabled = true
+
+<%text>##</%text> Enable Backlash client-side interactive debugger (default false)
+<%text>##</%text> WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*
+<%text>##</%text> This debug mode will allow all visitors to execute malicious code.
+#debug = false
+
+<%text>##</%text> Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)
+#trace_errors.enable = true
+<%text>##</%text> Errors will be reported by mail if trace_errors.error_email is set.
+
+<%text>##</%text> Propagate email settings to ErrorReporter of TurboGears2
+<%text>##</%text> You do not normally need to change these lines
+get trace_errors.smtp_server = smtp_server
+get trace_errors.smtp_port = smtp_port
+get trace_errors.from_address = error_email_from
+get trace_errors.error_email = email_to
+get trace_errors.smtp_username = smtp_username
+get trace_errors.smtp_password = smtp_password
+get trace_errors.smtp_use_tls = smtp_use_tls
+
+<%text>##</%text>################################
+<%text>##</%text>        LOGVIEW CONFIG        ##
+<%text>##</%text>################################
+
+logview.sqlalchemy = #faa
+logview.pylons.templating = #bfb
+logview.pylons.util = #eee
+
+<%text>##</%text>#######################
+<%text>##</%text>      DB CONFIG      ##
+<%text>##</%text>#######################
+
+%if database_engine == 'sqlite':
+sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
+%else:
+#sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
+%endif
+%if database_engine == 'postgres':
+sqlalchemy.url = postgresql://kallithea:password@localhost/kallithea
+%else:
+#sqlalchemy.url = postgresql://kallithea:password@localhost/kallithea
+%endif
+%if database_engine == 'mysql':
+sqlalchemy.url = mysql://kallithea:password@localhost/kallithea?charset=utf8mb4
+%else:
+#sqlalchemy.url = mysql://kallithea:password@localhost/kallithea?charset=utf8mb4
+%endif
+<%text>##</%text> Note: the mysql:// prefix should also be used for MariaDB
+
+sqlalchemy.pool_recycle = 3600
+
+<%text>##</%text>##############################
+<%text>##</%text>   ALEMBIC CONFIGURATION    ##
+<%text>##</%text>##############################
+
+[alembic]
+script_location = kallithea:alembic
+
+<%text>##</%text>##############################
+<%text>##</%text>   LOGGING CONFIGURATION    ##
+<%text>##</%text>##############################
+
+[loggers]
+keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer, werkzeug, backlash
+
+[handlers]
+keys = console, console_color, console_color_sql, null
+
+[formatters]
+keys = generic, color_formatter, color_formatter_sql
+
+<%text>##</%text>###########
+<%text>##</%text> LOGGERS ##
+<%text>##</%text>###########
+
+[logger_root]
+level = NOTSET
+handlers = console
+<%text>##</%text> For coloring based on log level:
+#handlers = console_color
+
+[logger_routes]
+level = WARN
+handlers =
+qualname = routes.middleware
+<%text>##</%text> "level = DEBUG" logs the route matched and routing variables.
+
+[logger_beaker]
+level = WARN
+handlers =
+qualname = beaker.container
+
+[logger_templates]
+level = WARN
+handlers =
+qualname = pylons.templating
+
+[logger_kallithea]
+level = WARN
+handlers =
+qualname = kallithea
+
+[logger_tg]
+level = WARN
+handlers =
+qualname = tg
+
+[logger_gearbox]
+level = WARN
+handlers =
+qualname = gearbox
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+<%text>##</%text> For coloring based on log level and pretty printing of SQL:
+#level = INFO
+#handlers = console_color_sql
+#propagate = 0
+
+[logger_whoosh_indexer]
+level = WARN
+handlers =
+qualname = whoosh_indexer
+
+[logger_werkzeug]
+level = WARN
+handlers =
+qualname = werkzeug
+
+[logger_backlash]
+level = WARN
+handlers =
+qualname = backlash
+
+<%text>##</%text>############
+<%text>##</%text> HANDLERS ##
+<%text>##</%text>############
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+formatter = generic
+
+[handler_console_color]
+<%text>##</%text> ANSI color coding based on log level
+class = StreamHandler
+args = (sys.stderr,)
+formatter = color_formatter
+
+[handler_console_color_sql]
+<%text>##</%text> ANSI color coding and pretty printing of SQL statements
+class = StreamHandler
+args = (sys.stderr,)
+formatter = color_formatter_sql
+
+[handler_null]
+class = NullHandler
+args = ()
+
+<%text>##</%text>##############
+<%text>##</%text> FORMATTERS ##
+<%text>##</%text>##############
+
+[formatter_generic]
+format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %Y-%m-%d %H:%M:%S
+
+[formatter_color_formatter]
+class = kallithea.lib.colored_formatter.ColorFormatter
+format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %Y-%m-%d %H:%M:%S
+
+[formatter_color_formatter_sql]
+class = kallithea.lib.colored_formatter.ColorFormatterSql
+format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %Y-%m-%d %H:%M:%S
+
+<%text>##</%text>###############
+<%text>##</%text> SSH LOGGING ##
+<%text>##</%text>###############
+
+<%text>##</%text> The default loggers use 'handler_console' that uses StreamHandler with
+<%text>##</%text> destination 'sys.stderr'. In the context of the SSH server process, these log
+<%text>##</%text> messages would be sent to the client, which is normally not what you want.
+<%text>##</%text> By default, when running ssh-serve, just use NullHandler and disable logging
+<%text>##</%text> completely. For other logging options, see:
+<%text>##</%text> https://docs.python.org/2/library/logging.handlers.html
+
+[ssh_serve:logger_root]
+level = CRITICAL
+handlers = null
+
+<%text>##</%text> Note: If logging is configured with other handlers, they might need similar
+<%text>##</%text> muting for ssh-serve too.
--- a/kallithea/templates/journal/journal.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/journal/journal.html	Thu May 27 21:27:37 2021 +0200
@@ -41,7 +41,8 @@
         </div>
     </div>
 
-<script>'use strict';
+<script>
+'use strict';
 
     $('#j_filter').click(function(){
         var $jfilter = $('#j_filter');
@@ -72,7 +73,8 @@
 
 </script>
 
-<script>'use strict';
+<script>
+    'use strict';
     $(document).ready(function(){
         var $journal = $('#journal');
         $journal.on('click','.pager_link',function(e){
--- a/kallithea/templates/login.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/login.html	Thu May 27 21:27:37 2021 +0200
@@ -49,10 +49,10 @@
 
             <div class="form-group">
                 <div>
-                    ${h.link_to(_('Forgot your password ?'),h.url('reset_password'))}
+                    ${h.link_to(_('Forgot your password?'),h.url('reset_password'))}
                     %if h.HasPermissionAny('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')():
                         /
-                        ${h.link_to(_("Don't have an account ?"),h.url('register'))}
+                        ${h.link_to(_("Don't have an account?"),h.url('register'))}
                     %endif
                 </div>
             </div>
@@ -64,7 +64,8 @@
             </div>
         </div>
         ${h.end_form()}
-        <script>'use strict';
+        <script>
+        'use strict';
         $(document).ready(function(){
             $('#username').focus();
         });
--- a/kallithea/templates/password_reset.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/password_reset.html	Thu May 27 21:27:37 2021 +0200
@@ -53,7 +53,8 @@
                 </div>
         </div>
         ${h.end_form()}
-        <script>'use strict';
+        <script>
+         'use strict';
          $(document).ready(function(){
             $('#email').focus();
          });
--- a/kallithea/templates/password_reset_confirmation.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/password_reset_confirmation.html	Thu May 27 21:27:37 2021 +0200
@@ -54,7 +54,7 @@
         </div>
         ${h.end_form()}
     </div>
-   </div>
 </div>
 </div>
 </div>
+</div>
--- a/kallithea/templates/pullrequests/pullrequest.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/pullrequests/pullrequest.html	Thu May 27 21:27:37 2021 +0200
@@ -92,7 +92,8 @@
 </div>
 
 <script src="${h.url('/js/graph.js', ver=c.kallithea_version)}"></script>
-<script>'use strict';
+<script>
+  'use strict';
   pyroutes.register('pullrequest_repo_info', ${h.js(url('pullrequest_repo_info',repo_name='%(repo_name)s'))}, ['repo_name']);
 
   var pendingajax = undefined;
--- a/kallithea/templates/pullrequests/pullrequest_show.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/pullrequests/pullrequest_show.html	Thu May 27 21:27:37 2021 +0200
@@ -105,9 +105,9 @@
             %if c.cs_ranges:
               <div>
                ## TODO: use cs_ranges[-1] or org_ref_parts[1] in both cases?
-               %if h.is_hg(c.pull_request.org_repo):
-                 <span>hg pull ${c.pull_request.org_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl)} -r ${h.short_id(c.cs_ranges[-1].raw_id)}</span>
-               %elif h.is_git(c.pull_request.org_repo):
+               %if c.pull_request.org_repo.repo_type == 'hg':
+                 <span>hg pull ${c.pull_request.org_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl)} -r ${c.cs_ranges[-1].short_id}</span>
+               %elif c.pull_request.org_repo.repo_type == 'git':
                  <span>git pull ${c.pull_request.org_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl)} ${c.pull_request.org_ref_parts[1]}</span>
                %endif
               </div>
@@ -312,7 +312,8 @@
             </div>
         </div>
     </div>
-    <script>'use strict';
+    <script>
+    'use strict';
     // TODO: switch this to pyroutes
     var AJAX_COMMENT_URL = ${h.js(url('pullrequest_comment',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id))};
     var AJAX_COMMENT_DELETE_URL = ${h.js(url('pullrequest_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__'))};
@@ -343,7 +344,8 @@
     ## main comment form and it status
     ${comment.comments(change_status=c.allowed_to_change_status)}
 
-    <script>'use strict';
+    <script>
+      'use strict';
       $(document).ready(function(){
           PullRequestAutoComplete($('#user'));
           SimpleUserAutoComplete($('#owner'));
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/py/extensions.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,221 @@
+# Additional mappings that are not present in the Pygments lexers,
+# used for building stats.
+# Format is {'ext': ['Names']}, e.g. {'py': ['Python']}; note that there can be
+# more than one name per extension.
+# NOTE: this will override any mappings in LANGUAGES_EXTENSIONS_MAP
+# built by Pygments.
+EXTRA_MAPPINGS = {}
+
+# Additional lexer definitions for custom files.
+# These override the Pygments lexers and use the defined lexer name to colorize
+# the files. Format is {'ext': 'lexer_name'}.
+# The list of available lexers can be printed by running:
+# python -c "import pprint;from pygments import lexers;pprint.pprint([(x[0], x[1]) for x in lexers.get_all_lexers()]);"
+
+EXTRA_LEXERS = {}
+
+#==============================================================================
+# WHOOSH INDEX EXTENSIONS
+#==============================================================================
+# If INDEX_EXTENSIONS is [], the extensions known by the Pygments lexers are used by default.
+# To set your own, add the extensions whose content should be indexed to this list.
+INDEX_EXTENSIONS = []
+
+# Additional extensions for indexing besides the defaults from Pygments;
+# these get added to INDEX_EXTENSIONS.
+EXTRA_INDEX_EXTENSIONS = []
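+# e.g. (illustrative only): EXTRA_INDEX_EXTENSIONS = ['cfg', 'ini']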
+
+
+#==============================================================================
+# POST CREATE REPOSITORY HOOK
+#==============================================================================
+# this function will be executed after each repository is created
+def CREATE_REPO_HOOK(*args, **kwargs):
+    """
+    Post create repository HOOK
+    kwargs available:
+     :param repo_name:
+     :param repo_type:
+     :param description:
+     :param private:
+     :param created_on:
+     :param enable_downloads:
+     :param repo_id:
+     :param owner_id:
+     :param enable_statistics:
+     :param clone_uri:
+     :param fork_id:
+     :param group_id:
+     :param created_by:
+    """
+
+
+#==============================================================================
+# PRE CREATE USER HOOK
+#==============================================================================
+# this function will be executed before each user is created
+def PRE_CREATE_USER_HOOK(*args, **kwargs):
+    """
+    Pre create user HOOK. It returns a tuple (bool, reason).
+    If bool is False, the user creation will be stopped and reason
+    will be displayed to the user.
+    kwargs available:
+    :param username:
+    :param password:
+    :param email:
+    :param firstname:
+    :param lastname:
+    :param active:
+    :param admin:
+    :param created_by:
+    """
+    reason = 'allowed'
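+    # Illustrative example (commented out, not enabled): restrict registration
+    # to a single mail domain, using the 'email' kwarg documented above.
+    # if not kwargs.get('email', '').endswith('@example.com'):
+    #     return False, 'Only example.com addresses may register'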
+    return True, reason
+
+
+#==============================================================================
+# POST CREATE USER HOOK
+#==============================================================================
+# this function will be executed after each user is created
+def CREATE_USER_HOOK(*args, **kwargs):
+    """
+    Post create user HOOK
+    kwargs available:
+      :param username:
+      :param full_name_or_username:
+      :param full_contact:
+      :param user_id:
+      :param name:
+      :param firstname:
+      :param short_contact:
+      :param admin:
+      :param lastname:
+      :param ip_addresses:
+      :param ldap_dn:
+      :param email:
+      :param api_key:
+      :param last_login:
+      :param full_name:
+      :param active:
+      :param password:
+      :param emails:
+      :param created_by:
+    """
+
+
+#==============================================================================
+# POST CREATE PULLREQUEST HOOK
+#==============================================================================
+# this function will be executed after a pull request is created
+def CREATE_PULLREQUEST_HOOK(*args, **kwargs):
+    """
+    Post create pull request HOOK
+    kwargs available:
+      :param pull_request_id:
+      :param title:
+      :param description:
+      :param created_on:
+      :param org_repo_id:
+      :param org_ref:
+      :param other_repo_id:
+      :param other_ref:
+      :param created_by:
+    There are other fields in 'class PullRequest' (kallithea/model/db.py) which
+    may or may not be useful for this hook.
+    """
+
+
+#==============================================================================
+# POST DELETE REPOSITORY HOOK
+#==============================================================================
+# this function will be executed after each repository deletion
+def DELETE_REPO_HOOK(*args, **kwargs):
+    """
+    Post delete repository HOOK
+    kwargs available:
+     :param repo_name:
+     :param repo_type:
+     :param description:
+     :param private:
+     :param created_on:
+     :param enable_downloads:
+     :param repo_id:
+     :param owner_id:
+     :param enable_statistics:
+     :param clone_uri:
+     :param fork_id:
+     :param group_id:
+     :param deleted_by:
+     :param deleted_on:
+    """
+
+
+#==============================================================================
+# POST DELETE USER HOOK
+#==============================================================================
+# this function will be executed after each user is deleted
+def DELETE_USER_HOOK(*args, **kwargs):
+    """
+    Post delete user HOOK
+    kwargs available:
+      :param username:
+      :param full_name_or_username:
+      :param full_contact:
+      :param user_id:
+      :param name:
+      :param firstname:
+      :param short_contact:
+      :param admin:
+      :param lastname:
+      :param ip_addresses:
+      :param ldap_dn:
+      :param email:
+      :param api_key:
+      :param last_login:
+      :param full_name:
+      :param active:
+      :param password:
+      :param emails:
+      :param deleted_by:
+    """
+
+
+#==============================================================================
+# POST PUSH HOOK
+#==============================================================================
+
+# This function will be executed after each push; it runs after the
+# built-in hook that Kallithea uses for logging pushes.
+def PUSH_HOOK(*args, **kwargs):
+    """
+    Post push hook
+    kwargs available:
+
+      :param config: path to .ini config used
+      :param scm: type of VCS, 'git' or 'hg'
+      :param username: name of user who pushed
+      :param ip: ip of who pushed
+      :param action: push
+      :param repository: repository name
+      :param pushed_revs: list of pushed revisions
+    """
+
+
+#==============================================================================
+# POST PULL HOOK
+#==============================================================================
+
+# This function will be executed after each pull; it runs after the
+# built-in hook that Kallithea uses for logging pulls.
+def PULL_HOOK(*args, **kwargs):
+    """
+    Post pull hook
+    kwargs available:
+
+      :param config: path to .ini config used
+      :param scm: type of VCS, 'git' or 'hg'
+      :param username: name of user who pulled
+      :param ip: ip of who pulled
+      :param action: pull
+      :param repository: repository name
+    """
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/templates/py/git_post_receive_hook.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,46 @@
+"""Kallithea Git hook
+
+This hook is installed and maintained by Kallithea. It will be overwritten
+by Kallithea - don't customize it manually!
+
+When Kallithea invokes Git, the KALLITHEA_EXTRAS environment variable will
+contain additional info like the Kallithea instance and user info that this
+hook will use.
+"""
+
+import os
+import subprocess
+import sys
+
+import kallithea.bin.vcs_hooks
+
+
+# Set output mode on windows to binary for stderr.
+# This prevents python (or the windows console) from replacing \n with \r\n.
+# Git doesn't display remote output lines that contain \r,
+# and therefore without this modification git would display empty lines
+# instead of the exception output.
+if sys.platform == "win32":
+    import msvcrt
+    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)  # pytype: disable=module-attr
+
+KALLITHEA_HOOK_VER = '_TMPL_'
+os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER
+
+
+def main():
+    repo_path = os.path.abspath('.')
+    git_stdin_lines = sys.stdin.readlines()
+    status = kallithea.bin.vcs_hooks.post_receive(repo_path, git_stdin_lines)
+
+    custom_hook = os.path.join(repo_path, 'hooks', 'post-receive-custom')
+    custom_status = None
+    if os.access(custom_hook, os.X_OK):
+        result = subprocess.run([custom_hook], input=''.join(git_stdin_lines), universal_newlines=True)
+        custom_status = result.returncode
+
+    sys.exit(status or custom_status)
+
+
+if __name__ == '__main__':
+    main()
--- a/kallithea/templates/register.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/register.html	Thu May 27 21:27:37 2021 +0200
@@ -90,13 +90,14 @@
                 </div>
         </div>
         ${h.end_form()}
-        <script>'use strict';
+        <script>
+        'use strict';
         $(document).ready(function(){
             $('#username').focus();
         });
         </script>
     </div>
- </div>
- </div>
- </div>
- </div>
+</div>
+</div>
+</div>
+</div>
--- a/kallithea/templates/search/search.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/search/search.html	Thu May 27 21:27:37 2021 +0200
@@ -56,7 +56,6 @@
                     ${h.select('type',c.cur_type,[('content',_('File contents')),
                         ('commit',_('Commit messages')),
                         ('path',_('File names')),
-                        ##('repository',_('Repository names')),
                         ],
                         class_='form-control')}
                 </div>
@@ -81,8 +80,6 @@
         <%include file='search_path.html'/>
     %elif c.cur_type == 'commit':
         <%include file='search_commit.html'/>
-    %elif c.cur_type == 'repository':
-        <%include file='search_repository.html'/>
     %endif
     </div>
 </div>
--- a/kallithea/templates/summary/statistics.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/summary/statistics.html	Thu May 27 21:27:37 2021 +0200
@@ -22,7 +22,7 @@
 
 <%def name="main()">
 ${self.repo_context_bar('summary')}
-    <div class="panel panel-primary">
+<div class="panel panel-primary">
     <div class="panel-heading clearfix">
         ${self.breadcrumbs()}
     </div>
@@ -51,7 +51,8 @@
     </div>
 </div>
 
-<script>'use strict';
+<script>
+'use strict';
 var data = ${h.js(c.trending_languages)};
 var total = 0;
 var tbl = document.createElement('table');
@@ -118,7 +119,8 @@
 }
 
 </script>
-<script>'use strict';
+<script>
+'use strict';
 
 /**
  * Plots summary graph
--- a/kallithea/templates/summary/summary.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/templates/summary/summary.html	Thu May 27 21:27:37 2021 +0200
@@ -15,7 +15,7 @@
 
     ##REMOTE
     %if c.db_repo.clone_uri:
-       - <i class="icon-fork"></i>${_('Clone from')} "<a href="${h.url(str(h.hide_credentials(c.db_repo.clone_uri)))}">${h.hide_credentials(c.db_repo.clone_uri)}</a>"
+       - <i class="icon-fork"></i>${_('Clone from')} "<a href="${h.url(str(h.credentials_filter(c.db_repo.clone_uri)))}">${h.credentials_filter(c.db_repo.clone_uri)}</a>"
     %endif
 </%def>
 
@@ -27,7 +27,8 @@
   <link href="${h.url('atom_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s ATOM feed') % c.repo_name}" type="application/atom+xml" />
   <link href="${h.url('rss_feed_home',repo_name=c.db_repo.repo_name,api_key=request.authuser.api_key)}" rel="alternate" title="${_('%s RSS feed') % c.repo_name}" type="application/rss+xml" />
 
-  <script>'use strict';
+  <script>
+  'use strict';
   function redirect_hash_branch(){
     var branch = window.location.hash.replace(/^#(.*)/, '$1');
     if (branch){
@@ -53,12 +54,8 @@
             <div class="form-group form-inline">
                 <label>${_('Clone URL')}:</label>
                 <div id="clone-url">
-                  <div id="clone_by_name" class="input-group"
-                    %if c.ssh_repo_url:
-                        style="display:none"
-                    %endif
-                    >
-                    <span class="input-group-addon">${self.repolabel(c.db_repo)}</span>
+                  <div id="clone_by_name" class="input-group" style="${'display:none' if c.ssh_repo_url else ''}">
+                    <span class="input-group-addon">${self.repolabel(c.db_repo.repo_type)}</span>
                     <input class="form-control" size="80" readonly="readonly" value="${c.clone_repo_url}"/>
                     <span class="input-group-addon btn use-id">${_('Use ID')}</span>
                     %if c.ssh_repo_url:
@@ -66,7 +63,7 @@
                     %endif
                   </div>
                   <div id="clone_by_id" class="input-group" style="display:none">
-                    <span class="input-group-addon">${self.repolabel(c.db_repo)}</span>
+                    <span class="input-group-addon">${self.repolabel(c.db_repo.repo_type)}</span>
                     <input class="form-control" size="80" readonly="readonly" value="${c.clone_repo_url_id}"/>
                     <span class="input-group-addon btn use-name">${_('Use Name')}</span>
                     %if c.ssh_repo_url:
@@ -75,7 +72,7 @@
                   </div>
                   %if c.ssh_repo_url:
                   <div id="clone_ssh" class="input-group">
-                    <span class="input-group-addon">${self.repolabel(c.db_repo)}</span>
+                    <span class="input-group-addon">${self.repolabel(c.db_repo.repo_type)}</span>
                     <input id="ssh_url" class="form-control" size="80" readonly="readonly" value="${c.ssh_repo_url}"/>
                     <span class="input-group-addon btn use-name">${_('Use HTTP')}</span>
                   </div>
@@ -129,13 +126,13 @@
         </div>
         <ul id="summary-menu-stats" class="list-group pull-right">
             <li class="list-group-item">
-               <a title="${_('Owner')} ${c.db_repo.owner.email}">
+              <a title="${_('Owner')} ${c.db_repo.owner.email}">
                 <i class="icon-user"></i>${c.db_repo.owner.username}
                 ${h.gravatar_div(c.db_repo.owner.email, size=18, div_class="pull-right")}
               </a>
             </li>
             <li class="list-group-item">
-               <a title="${_('Followers')}" href="${h.url('repo_followers_home',repo_name=c.repo_name)}">
+              <a title="${_('Followers')}" href="${h.url('repo_followers_home',repo_name=c.repo_name)}">
                 <i class="icon-heart"></i>${_('Followers')}
                 <span class="badge pull-right" id="current_followers_count">${c.repository_followers}</span>
               </a>
@@ -183,7 +180,7 @@
             ${h.link_to(_('Latest Changes'),h.url('changelog_home',repo_name=c.repo_name))}
         %else:
             ${_('Quick Start')}
-         %endif
+        %endif
         </div>
     </div>
     <div class="panel-body">
@@ -206,12 +203,12 @@
 ${c.db_repo_scm_instance.alias} clone ${c.clone_repo_url}
 ${c.db_repo_scm_instance.alias} add README # add first file
 ${c.db_repo_scm_instance.alias} commit -m "Initial" # commit with message
-${c.db_repo_scm_instance.alias} push ${'origin master' if h.is_git(c.db_repo_scm_instance) else ''} # push changes back
+${c.db_repo_scm_instance.alias} push ${'origin master' if c.db_repo_scm_instance.alias == 'git' else ''} # push changes back
                 </pre>
 
                 <h4>${_('Existing repository?')}</h4>
                 <pre>
-                %if h.is_git(c.db_repo_scm_instance):
+                %if c.db_repo_scm_instance.alias == 'git':
 git remote add origin ${c.clone_repo_url}
 git push -u origin master
                 %else:
@@ -238,7 +235,8 @@
 </div>
 %endif
 
-<script>'use strict';
+<script>
+'use strict';
 $(document).ready(function(){
     $('#clone-url input').click(function(){
         if($(this).hasClass('selected')){
@@ -334,7 +332,8 @@
 </script>
 
 %if c.show_stats:
-<script>'use strict';
+<script>
+'use strict';
 $(document).ready(function(){
     var data = ${h.js(c.trending_languages)};
     var total = 0;
--- a/kallithea/tests/api/api_base.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/api/api_base.py	Thu May 27 21:27:37 2021 +0200
@@ -19,24 +19,27 @@
 import os
 import random
 import re
+import string
+from typing import Sized
 
 import mock
 import pytest
+from webtest import TestApp
 
 from kallithea.lib import ext_json
 from kallithea.lib.auth import AuthUser
 from kallithea.lib.utils2 import ascii_bytes
+from kallithea.model import db, meta
 from kallithea.model.changeset_status import ChangesetStatusModel
-from kallithea.model.db import ChangesetStatus, PullRequest, RepoGroup, Repository, Setting, Ui, User
 from kallithea.model.gist import GistModel
-from kallithea.model.meta import Session
+from kallithea.model.pull_request import PullRequestModel
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.scm import ScmModel
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
 from kallithea.tests import base
-from kallithea.tests.fixture import Fixture
+from kallithea.tests.fixture import Fixture, raise_exception
 
 
 API_URL = '/_admin/api'
@@ -63,10 +66,6 @@
 jsonify = lambda obj: ext_json.loads(ext_json.dumps(obj))
 
 
-def crash(*args, **kwargs):
-    raise Exception('Total Crash !')
-
-
 def api_call(test_obj, params):
     response = test_obj.app.post(API_URL, content_type='application/json',
                                  params=params)
@@ -78,23 +77,29 @@
     gr = fixture.create_user_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
     UserGroupModel().add_user_to_group(user_group=gr,
                                        user=base.TEST_USER_ADMIN_LOGIN)
-    Session().commit()
+    meta.Session().commit()
     return gr
 
 
 def make_repo_group(name=TEST_REPO_GROUP):
     gr = fixture.create_repo_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
-    Session().commit()
+    meta.Session().commit()
     return gr
 
 
 class _BaseTestApi(object):
-    REPO = None
-    REPO_TYPE = None
+    app: TestApp  # assigned by app_fixture in subclass TestController mixin
+    # assigned in subclass:
+    REPO: str
+    REPO_TYPE: str
+    TEST_REVISION: str
+    TEST_PR_SRC: str
+    TEST_PR_DST: str
+    TEST_PR_REVISIONS: Sized
 
     @classmethod
     def setup_class(cls):
-        cls.usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        cls.usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         cls.apikey = cls.usr.api_key
         cls.test_user = UserModel().create_or_update(
             username='test-api',
@@ -103,7 +108,7 @@
             firstname='first',
             lastname='last'
         )
-        Session().commit()
+        meta.Session().commit()
         cls.TEST_USER_LOGIN = cls.test_user.username
         cls.apikey_regular = cls.test_user.api_key
 
@@ -138,29 +143,6 @@
         given = ext_json.loads(given)
         assert expected == given, (expected, given)
 
-    def test_Optional_object(self):
-        from kallithea.controllers.api.api import Optional
-
-        option1 = Optional(None)
-        assert '<Optional:%s>' % None == repr(option1)
-        assert option1() is None
-
-        assert 1 == Optional.extract(Optional(1))
-        assert 'trololo' == Optional.extract('trololo')
-
-    def test_Optional_OAttr(self):
-        from kallithea.controllers.api.api import Optional, OAttr
-
-        option1 = Optional(OAttr('apiuser'))
-        assert 'apiuser' == Optional.extract(option1)
-
-    def test_OAttr_object(self):
-        from kallithea.controllers.api.api import OAttr
-
-        oattr1 = OAttr('apiuser')
-        assert '<OptionalAttr:apiuser>' == repr(oattr1)
-        assert oattr1() == oattr1
-
     def test_api_wrong_key(self):
         id_, params = _build_data('trololo', 'get_user')
         response = api_call(self, params)
@@ -204,7 +186,6 @@
         assert response.status == '200 OK'
 
     def test_api_args_different_args(self):
-        import string
         expected = {
             'ascii_letters': string.ascii_letters,
             'ws': string.whitespace,
@@ -219,8 +200,8 @@
         id_, params = _build_data(self.apikey, 'get_users', )
         response = api_call(self, params)
         ret_all = []
-        _users = User.query().filter_by(is_default_user=False) \
-            .order_by(User.username).all()
+        _users = db.User.query().filter_by(is_default_user=False) \
+            .order_by(db.User.username).all()
         for usr in _users:
             ret = usr.get_api_data()
             ret_all.append(jsonify(ret))
@@ -232,7 +213,7 @@
                                   userid=base.TEST_USER_ADMIN_LOGIN)
         response = api_call(self, params)
 
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = usr.get_api_data()
         ret['permissions'] = AuthUser(dbuser=usr).permissions
 
@@ -251,7 +232,7 @@
         id_, params = _build_data(self.apikey, 'get_user')
         response = api_call(self, params)
 
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = usr.get_api_data()
         ret['permissions'] = AuthUser(dbuser=usr).permissions
 
@@ -262,7 +243,7 @@
         id_, params = _build_data(self.apikey_regular, 'get_user')
         response = api_call(self, params)
 
-        usr = User.get_by_username(self.TEST_USER_LOGIN)
+        usr = db.User.get_by_username(self.TEST_USER_LOGIN)
         ret = usr.get_api_data()
         ret['permissions'] = AuthUser(dbuser=usr).permissions
 
@@ -284,10 +265,10 @@
         r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         # hack around that clone_uri can't be set to to a local path
         # (as shown by test_api_create_repo_clone_uri_local)
-        r.clone_uri = os.path.join(Ui.get_by_key('paths', '/').ui_value, self.REPO)
-        Session().commit()
-
-        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == repo_name)]
+        r.clone_uri = os.path.join(db.Ui.get_by_key('paths', '/').ui_value, self.REPO)
+        meta.Session().commit()
+
+        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in db.Repository.query().filter(db.Repository.repo_name == repo_name)]
 
         id_, params = _build_data(self.apikey, 'pull',
                                   repoid=repo_name,)
@@ -297,7 +278,7 @@
                     'repository': repo_name}
         self._compare_ok(id_, expected, given=response.body)
 
-        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == repo_name)]
+        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in db.Repository.query().filter(db.Repository.repo_name == repo_name)]
 
         fixture.destroy_repo(repo_name)
 
@@ -329,7 +310,7 @@
         repo_name = 'test_pull_custom_remote'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
-        custom_remote_path = os.path.join(Ui.get_by_key('paths', '/').ui_value, self.REPO)
+        custom_remote_path = os.path.join(db.Ui.get_by_key('paths', '/').ui_value, self.REPO)
 
         id_, params = _build_data(self.apikey, 'pull',
                                   repoid=repo_name,
@@ -349,7 +330,7 @@
         expected = {'added': [], 'removed': []}
         self._compare_ok(id_, expected, given=response.body)
 
-    @mock.patch.object(ScmModel, 'repo_scan', crash)
+    @mock.patch.object(ScmModel, 'repo_scan', raise_exception)
     def test_api_rescann_error(self):
         id_, params = _build_data(self.apikey, 'rescan_repos', )
         response = api_call(self, params)
@@ -387,7 +368,7 @@
                                   password='trololo')
         response = api_call(self, params)
 
-        usr = User.get_by_username(username)
+        usr = db.User.get_by_username(username)
         ret = dict(
             msg='created new user `%s`' % username,
             user=jsonify(usr.get_api_data())
@@ -408,7 +389,7 @@
                                   email=email)
         response = api_call(self, params)
 
-        usr = User.get_by_username(username)
+        usr = db.User.get_by_username(username)
         ret = dict(
             msg='created new user `%s`' % username,
             user=jsonify(usr.get_api_data())
@@ -428,7 +409,7 @@
                                   email=email, extern_name='internal')
         response = api_call(self, params)
 
-        usr = User.get_by_username(username)
+        usr = db.User.get_by_username(username)
         ret = dict(
             msg='created new user `%s`' % username,
             user=jsonify(usr.get_api_data())
@@ -439,7 +420,7 @@
         finally:
             fixture.destroy_user(usr.user_id)
 
-    @mock.patch.object(UserModel, 'create_or_update', crash)
+    @mock.patch.object(UserModel, 'create_or_update', raise_exception)
     def test_api_create_user_when_exception_happened(self):
 
         username = 'test_new_api_user'
@@ -458,7 +439,7 @@
                                            password='qweqwe',
                                            email='u232@example.com',
                                            firstname='u1', lastname='u1')
-        Session().commit()
+        meta.Session().commit()
         username = usr.username
         email = usr.email
         usr_id = usr.user_id
@@ -473,13 +454,13 @@
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
 
-    @mock.patch.object(UserModel, 'delete', crash)
+    @mock.patch.object(UserModel, 'delete', raise_exception)
     def test_api_delete_user_when_exception_happened(self):
         usr = UserModel().create_or_update(username='test_user',
                                            password='qweqwe',
                                            email='u232@example.com',
                                            firstname='u1', lastname='u1')
-        Session().commit()
+        meta.Session().commit()
         username = usr.username
 
         id_, params = _build_data(self.apikey, 'delete_user',
@@ -505,7 +486,7 @@
         ('password', 'newpass'),
     ])
     def test_api_update_user(self, name, expected):
-        usr = User.get_by_username(self.TEST_USER_LOGIN)
+        usr = db.User.get_by_username(self.TEST_USER_LOGIN)
         kw = {name: expected,
               'userid': usr.user_id}
         id_, params = _build_data(self.apikey, 'update_user', **kw)
@@ -514,7 +495,7 @@
         ret = {
             'msg': 'updated user ID:%s %s' % (
                 usr.user_id, self.TEST_USER_LOGIN),
-            'user': jsonify(User \
+            'user': jsonify(db.User \
                 .get_by_username(self.TEST_USER_LOGIN) \
                 .get_api_data())
         }
@@ -523,7 +504,7 @@
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_update_user_no_changed_params(self):
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=base.TEST_USER_ADMIN_LOGIN)
@@ -538,7 +519,7 @@
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_update_user_by_user_id(self):
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=usr.user_id)
@@ -553,7 +534,7 @@
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_update_user_default_user(self):
-        usr = User.get_default_user()
+        usr = db.User.get_default_user()
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=usr.user_id)
 
@@ -561,9 +542,9 @@
         expected = 'editing default user is forbidden'
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(UserModel, 'update_user', crash)
+    @mock.patch.object(UserModel, 'update_user', raise_exception)
     def test_api_update_user_when_exception_happens(self):
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         ret = jsonify(usr.get_api_data())
         id_, params = _build_data(self.apikey, 'update_user',
                                   userid=usr.user_id)
@@ -580,7 +561,7 @@
         RepoModel().grant_user_group_permission(repo=self.REPO,
                                                 group_name=new_group,
                                                 perm='repository.read')
-        Session().commit()
+        meta.Session().commit()
         id_, params = _build_data(self.apikey, 'get_repo',
                                   repoid=self.REPO)
         response = api_call(self, params)
@@ -632,7 +613,7 @@
         RepoModel().grant_user_permission(repo=self.REPO,
                                           user=self.TEST_USER_LOGIN,
                                           perm=grant_perm)
-        Session().commit()
+        meta.Session().commit()
         id_, params = _build_data(self.apikey_regular, 'get_repo',
                                   repoid=self.REPO)
         response = api_call(self, params)
@@ -671,7 +652,7 @@
 
     def test_api_get_repo_by_non_admin_no_permission_to_repo(self):
         RepoModel().grant_user_permission(repo=self.REPO,
-                                          user=User.DEFAULT_USER_NAME,
+                                          user=db.User.DEFAULT_USER_NAME,
                                           perm='repository.none')
         try:
             RepoModel().grant_user_permission(repo=self.REPO,
@@ -686,7 +667,7 @@
             self._compare_error(id_, expected, given=response.body)
         finally:
             RepoModel().grant_user_permission(repo=self.REPO,
-                                              user=User.DEFAULT_USER_NAME,
+                                              user=db.User.DEFAULT_USER_NAME,
                                               perm='repository.read')
 
     def test_api_get_repo_that_doesn_not_exist(self):
@@ -704,7 +685,7 @@
 
         expected = jsonify([
             repo.get_api_data()
-            for repo in Repository.query()
+            for repo in db.Repository.query()
         ])
 
         self._compare_ok(id_, expected, given=response.body)
@@ -715,7 +696,7 @@
 
         expected = jsonify([
             repo.get_api_data()
-            for repo in RepoModel().get_all_user_repos(self.TEST_USER_LOGIN)
+            for repo in AuthUser(dbuser=db.User.get_by_username(self.TEST_USER_LOGIN)).get_all_user_repos()
         ])
 
         self._compare_ok(id_, expected, given=response.body)
@@ -784,7 +765,7 @@
         RepoModel().grant_user_permission(repo=self.REPO,
                                           user=self.TEST_USER_LOGIN,
                                           perm=grant_perm)
-        Session().commit()
+        meta.Session().commit()
 
         rev = 'tip'
         path = '/'
@@ -816,7 +797,6 @@
         ret = {
             'msg': 'Created new repository `%s`' % repo_name,
             'success': True,
-            'task': None,
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
@@ -877,7 +857,7 @@
 
         # create group before creating repo
         rg = fixture.create_repo_group(repo_group_name)
-        Session().commit()
+        meta.Session().commit()
 
         id_, params = _build_data(self.apikey, 'create_repo',
                                   repo_name=repo_name,
@@ -887,7 +867,6 @@
         expected = {
             'msg': 'Created new repository `%s`' % repo_name,
             'success': True,
-            'task': None,
         }
         self._compare_ok(id_, expected, given=response.body)
         repo = RepoModel().get_by_repo_name(repo_name)
@@ -901,14 +880,14 @@
         repo_group_name = '%s/%s' % (TEST_REPO_GROUP, repo_group_basename)
         repo_name = '%s/api-repo' % repo_group_name
 
-        top_group = RepoGroup.get_by_group_name(TEST_REPO_GROUP)
+        top_group = db.RepoGroup.get_by_group_name(TEST_REPO_GROUP)
         assert top_group
         rg = fixture.create_repo_group(repo_group_basename, parent_group_id=top_group)
-        Session().commit()
+        meta.Session().commit()
         RepoGroupModel().grant_user_permission(repo_group_name,
                                                self.TEST_USER_LOGIN,
                                                'group.none')
-        Session().commit()
+        meta.Session().commit()
 
         id_, params = _build_data(self.apikey_regular, 'create_repo',
                                   repo_name=repo_name,
@@ -949,7 +928,6 @@
         ret = {
             'msg': 'Created new repository `%s`' % repo_name,
             'success': True,
-            'task': None,
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
@@ -969,7 +947,6 @@
         ret = {
             'msg': 'Created new repository `%s`' % repo_name,
             'success': True,
-            'task': None,
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
@@ -1011,7 +988,7 @@
         self._compare_error(id_, expected, given=response.body)
         fixture.destroy_repo(repo_name)
 
-    @mock.patch.object(RepoModel, 'create', crash)
+    @mock.patch.object(RepoModel, 'create', raise_exception)
     def test_api_create_repo_exception_occurred(self):
         repo_name = 'api-repo'
         id_, params = _build_data(self.apikey, 'create_repo',
@@ -1131,7 +1108,7 @@
         finally:
             fixture.destroy_repo(repo_name)
 
-    @mock.patch.object(RepoModel, 'update', crash)
+    @mock.patch.object(RepoModel, 'update', raise_exception)
     def test_api_update_repo_exception_occurred(self):
         repo_name = 'api_update_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
@@ -1255,7 +1232,7 @@
         repo_name = 'api_delete_me'
         fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
         try:
-            with mock.patch.object(RepoModel, 'delete', crash):
+            with mock.patch.object(RepoModel, 'delete', raise_exception):
                 id_, params = _build_data(self.apikey, 'delete_repo',
                                           repoid=repo_name, )
                 response = api_call(self, params)
@@ -1278,7 +1255,6 @@
             'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
                                                      fork_name),
             'success': True,
-            'task': None,
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
@@ -1302,7 +1278,6 @@
             'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
                                                      fork_name),
             'success': True,
-            'task': None,
         }
         expected = ret
         self._compare_ok(id_, expected, given=response.body)
@@ -1322,10 +1297,10 @@
 
     def test_api_fork_repo_non_admin_no_permission_to_fork(self):
         RepoModel().grant_user_permission(repo=self.REPO,
-                                          user=User.DEFAULT_USER_NAME,
+                                          user=db.User.DEFAULT_USER_NAME,
                                           perm='repository.none')
+        fork_name = 'api-repo-fork'
         try:
-            fork_name = 'api-repo-fork'
             id_, params = _build_data(self.apikey_regular, 'fork_repo',
                                       repoid=self.REPO,
                                       fork_name=fork_name,
@@ -1335,7 +1310,7 @@
             self._compare_error(id_, expected, given=response.body)
         finally:
             RepoModel().grant_user_permission(repo=self.REPO,
-                                              user=User.DEFAULT_USER_NAME,
+                                              user=db.User.DEFAULT_USER_NAME,
                                               perm='repository.read')
             fixture.destroy_repo(fork_name)
 
@@ -1406,7 +1381,7 @@
         expected = "repo `%s` already exist" % fork_name
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoModel, 'create_fork', crash)
+    @mock.patch.object(RepoModel, 'create_fork', raise_exception)
     def test_api_fork_repo_exception_occurred(self):
         fork_name = 'api-repo-fork'
         id_, params = _build_data(self.apikey, 'fork_repo',
@@ -1478,7 +1453,7 @@
         expected = "user group `%s` already exist" % TEST_USER_GROUP
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(UserGroupModel, 'create', crash)
+    @mock.patch.object(UserGroupModel, 'create', raise_exception)
     def test_api_get_user_group_exception_occurred(self):
         group_name = 'exception_happens'
         id_, params = _build_data(self.apikey, 'create_user_group',
@@ -1514,7 +1489,7 @@
                 gr_name = updates['group_name']
             fixture.destroy_user_group(gr_name)
 
-    @mock.patch.object(UserGroupModel, 'update', crash)
+    @mock.patch.object(UserGroupModel, 'update', raise_exception)
     def test_api_update_user_group_exception_occurred(self):
         gr_name = 'test_group'
         fixture.create_user_group(gr_name)
@@ -1553,7 +1528,7 @@
         expected = 'user group `%s` does not exist' % 'false-group'
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(UserGroupModel, 'add_user_to_group', crash)
+    @mock.patch.object(UserGroupModel, 'add_user_to_group', raise_exception)
     def test_api_add_user_to_user_group_exception_occurred(self):
         gr_name = 'test_group'
         fixture.create_user_group(gr_name)
@@ -1571,7 +1546,7 @@
         gr_name = 'test_group_3'
         gr = fixture.create_user_group(gr_name)
         UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
         try:
             id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
                                       usergroupid=gr_name,
@@ -1586,12 +1561,12 @@
         finally:
             fixture.destroy_user_group(gr_name)
 
-    @mock.patch.object(UserGroupModel, 'remove_user_from_group', crash)
+    @mock.patch.object(UserGroupModel, 'remove_user_from_group', raise_exception)
     def test_api_remove_user_from_user_group_exception_occurred(self):
         gr_name = 'test_group_3'
         gr = fixture.create_user_group(gr_name)
         UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
         try:
             id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
                                       usergroupid=gr_name,
@@ -1645,7 +1620,7 @@
                                   usergroupid=gr_name)
 
         try:
-            with mock.patch.object(UserGroupModel, 'delete', crash):
+            with mock.patch.object(UserGroupModel, 'delete', raise_exception):
                 response = api_call(self, params)
                 expected = 'failed to delete user group ID:%s %s' % (gr_id, gr_name)
                 self._compare_error(id_, expected, given=response.body)
@@ -1687,7 +1662,7 @@
         expected = 'permission `%s` does not exist' % perm
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoModel, 'grant_user_permission', crash)
+    @mock.patch.object(RepoModel, 'grant_user_permission', raise_exception)
     def test_api_grant_user_permission_exception_when_adding(self):
         perm = 'repository.read'
         id_, params = _build_data(self.apikey,
@@ -1717,7 +1692,7 @@
         }
         self._compare_ok(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoModel, 'revoke_user_permission', crash)
+    @mock.patch.object(RepoModel, 'revoke_user_permission', raise_exception)
     def test_api_revoke_user_permission_exception_when_adding(self):
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission',
@@ -1765,7 +1740,7 @@
         expected = 'permission `%s` does not exist' % perm
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoModel, 'grant_user_group_permission', crash)
+    @mock.patch.object(RepoModel, 'grant_user_group_permission', raise_exception)
     def test_api_grant_user_group_permission_exception_when_adding(self):
         perm = 'repository.read'
         id_, params = _build_data(self.apikey,
@@ -1784,7 +1759,7 @@
         RepoModel().grant_user_group_permission(repo=self.REPO,
                                                 group_name=TEST_USER_GROUP,
                                                 perm='repository.read')
-        Session().commit()
+        meta.Session().commit()
         id_, params = _build_data(self.apikey,
                                   'revoke_user_group_permission',
                                   repoid=self.REPO,
@@ -1799,7 +1774,7 @@
         }
         self._compare_ok(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoModel, 'revoke_user_group_permission', crash)
+    @mock.patch.object(RepoModel, 'revoke_user_group_permission', raise_exception)
     def test_api_revoke_user_group_permission_exception_when_adding(self):
         id_, params = _build_data(self.apikey,
                                   'revoke_user_group_permission',
@@ -1868,7 +1843,7 @@
             RepoGroupModel().grant_user_permission(TEST_REPO_GROUP,
                                                    self.TEST_USER_LOGIN,
                                                    'group.admin')
-            Session().commit()
+            meta.Session().commit()
 
         id_, params = _build_data(self.apikey_regular,
                                   'grant_user_permission_to_repo_group',
@@ -1901,7 +1876,7 @@
         expected = 'permission `%s` does not exist' % perm
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoGroupModel, 'grant_user_permission', crash)
+    @mock.patch.object(RepoGroupModel, 'grant_user_permission', raise_exception)
     def test_api_grant_user_permission_to_repo_group_exception_when_adding(self):
         perm = 'group.read'
         id_, params = _build_data(self.apikey,
@@ -1926,7 +1901,7 @@
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
                                                user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
-        Session().commit()
+        meta.Session().commit()
 
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission_from_repo_group',
@@ -1960,13 +1935,13 @@
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
                                                user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
-        Session().commit()
+        meta.Session().commit()
 
         if grant_admin:
             RepoGroupModel().grant_user_permission(TEST_REPO_GROUP,
                                                    self.TEST_USER_LOGIN,
                                                    'group.admin')
-            Session().commit()
+            meta.Session().commit()
 
         id_, params = _build_data(self.apikey_regular,
                                   'revoke_user_permission_from_repo_group',
@@ -1986,7 +1961,7 @@
             expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP
             self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoGroupModel, 'revoke_user_permission', crash)
+    @mock.patch.object(RepoGroupModel, 'revoke_user_permission', raise_exception)
     def test_api_revoke_user_permission_from_repo_group_exception_when_adding(self):
         id_, params = _build_data(self.apikey,
                                   'revoke_user_permission_from_repo_group',
@@ -2056,7 +2031,7 @@
             RepoGroupModel().grant_user_permission(TEST_REPO_GROUP,
                                                    self.TEST_USER_LOGIN,
                                                    'group.admin')
-            Session().commit()
+            meta.Session().commit()
 
         id_, params = _build_data(self.apikey_regular,
                                   'grant_user_group_permission_to_repo_group',
@@ -2090,7 +2065,7 @@
         expected = 'permission `%s` does not exist' % perm
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoGroupModel, 'grant_user_group_permission', crash)
+    @mock.patch.object(RepoGroupModel, 'grant_user_group_permission', raise_exception)
     def test_api_grant_user_group_permission_exception_when_adding_to_repo_group(self):
         perm = 'group.read'
         id_, params = _build_data(self.apikey,
@@ -2115,7 +2090,7 @@
         RepoGroupModel().grant_user_group_permission(repo_group=TEST_REPO_GROUP,
                                                      group_name=TEST_USER_GROUP,
                                                      perm='group.read',)
-        Session().commit()
+        meta.Session().commit()
         id_, params = _build_data(self.apikey,
                                   'revoke_user_group_permission_from_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
@@ -2148,13 +2123,13 @@
         RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP,
                                                user=base.TEST_USER_ADMIN_LOGIN,
                                                perm='group.read',)
-        Session().commit()
+        meta.Session().commit()
 
         if grant_admin:
             RepoGroupModel().grant_user_permission(TEST_REPO_GROUP,
                                                    self.TEST_USER_LOGIN,
                                                    'group.admin')
-            Session().commit()
+            meta.Session().commit()
 
         id_, params = _build_data(self.apikey_regular,
                                   'revoke_user_group_permission_from_repo_group',
@@ -2174,7 +2149,7 @@
             expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP
             self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(RepoGroupModel, 'revoke_user_group_permission', crash)
+    @mock.patch.object(RepoGroupModel, 'revoke_user_group_permission', raise_exception)
     def test_api_revoke_user_group_permission_from_repo_group_exception_when_adding(self):
         id_, params = _build_data(self.apikey, 'revoke_user_group_permission_from_repo_group',
                                   repogroupid=TEST_REPO_GROUP,
@@ -2295,7 +2270,7 @@
         }
         self._compare_ok(id_, expected, given=response.body)
 
-    @mock.patch.object(GistModel, 'create', crash)
+    @mock.patch.object(GistModel, 'create', raise_exception)
     def test_api_create_gist_exception_occurred(self):
         id_, params = _build_data(self.apikey_regular, 'create_gist',
                                   files={})
@@ -2327,7 +2302,7 @@
         expected = 'gist `%s` does not exist' % (gist_id,)
         self._compare_error(id_, expected, given=response.body)
 
-    @mock.patch.object(GistModel, 'delete', crash)
+    @mock.patch.object(GistModel, 'delete', raise_exception)
     def test_api_delete_gist_exception_occurred(self):
         gist_id = fixture.create_gist().gist_access_id
         id_, params = _build_data(self.apikey, 'delete_gist',
@@ -2348,7 +2323,7 @@
     def test_api_get_server_info(self):
         id_, params = _build_data(self.apikey, 'get_server_info')
         response = api_call(self, params)
-        expected = Setting.get_server_info()
+        expected = db.Setting.get_server_info()
         self._compare_ok(id_, expected, given=response.body)
 
     def test_api_get_changesets(self):
@@ -2449,7 +2424,7 @@
             "args": {"pullrequest_id": pull_request_id},
         }))
         response = api_call(self, params)
-        pullrequest = PullRequest().get(pull_request_id)
+        pullrequest = db.PullRequest().get(pull_request_id)
         expected = {
             "status": "new",
             "pull_request_id": pull_request_id,
@@ -2465,6 +2440,8 @@
             "statuses": [{"status": "under_review", "reviewer": base.TEST_USER_ADMIN_LOGIN, "modified_at": "2000-01-01T00:00:00"} for i in range(0, len(self.TEST_PR_REVISIONS))],
             "title": "get test",
             "revisions": self.TEST_PR_REVISIONS,
+            "created_on": "2000-01-01T00:00:00",
+            "updated_on": "2000-01-01T00:00:00",
         }
         self._compare_ok(random_id, expected,
                          given=re.sub(br"\d\d\d\d\-\d\d\-\d\dT\d\d\:\d\d\:\d\d",
@@ -2481,9 +2458,9 @@
         }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
-        pullrequest = PullRequest().get(pull_request_id)
+        pullrequest = db.PullRequest().get(pull_request_id)
         assert pullrequest.comments[-1].text == ''
-        assert pullrequest.status == PullRequest.STATUS_CLOSED
+        assert pullrequest.status == db.PullRequest.STATUS_CLOSED
         assert pullrequest.is_closed() == True
 
     def test_api_status_pullrequest(self):
@@ -2492,24 +2469,24 @@
         random_id = random.randrange(1, 9999)
         params = ascii_bytes(ext_json.dumps({
             "id": random_id,
-            "api_key": User.get_by_username(base.TEST_USER_REGULAR2_LOGIN).api_key,
+            "api_key": db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN).api_key,
             "method": "comment_pullrequest",
-            "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
+            "args": {"pull_request_id": pull_request_id, "status": db.ChangesetStatus.STATUS_APPROVED},
         }))
         response = api_call(self, params)
-        pullrequest = PullRequest().get(pull_request_id)
+        pullrequest = db.PullRequest().get(pull_request_id)
         self._compare_error(random_id, "No permission to change pull request status. User needs to be admin, owner or reviewer.", given=response.body)
-        assert ChangesetStatus.STATUS_UNDER_REVIEW == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
+        assert db.ChangesetStatus.STATUS_UNDER_REVIEW == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
         params = ascii_bytes(ext_json.dumps({
             "id": random_id,
-            "api_key": User.get_by_username(base.TEST_USER_REGULAR_LOGIN).api_key,
+            "api_key": db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN).api_key,
             "method": "comment_pullrequest",
-            "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
+            "args": {"pull_request_id": pull_request_id, "status": db.ChangesetStatus.STATUS_APPROVED},
         }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
-        pullrequest = PullRequest().get(pull_request_id)
-        assert ChangesetStatus.STATUS_APPROVED == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.ChangesetStatus.STATUS_APPROVED == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
 
     def test_api_comment_pullrequest(self):
         pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, "comment test")
@@ -2522,5 +2499,319 @@
         }))
         response = api_call(self, params)
         self._compare_ok(random_id, True, given=response.body)
-        pullrequest = PullRequest().get(pull_request_id)
+        pullrequest = db.PullRequest().get(pull_request_id)
         assert pullrequest.comments[-1].text == 'Looks good to me'
+
+    def test_api_edit_reviewers_add_single(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "add": base.TEST_USER_REGULAR2_LOGIN},
+        }))
+        response = api_call(self, params)
+        expected = { 'added': [base.TEST_USER_REGULAR2_LOGIN], 'already_present': [], 'removed': [] }
+
+        self._compare_ok(random_id, expected, given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_add_nonexistent(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "add": 999},
+        }))
+        response = api_call(self, params)
+
+        self._compare_error(random_id, "user `999` does not exist", given=response.body)
+
+    def test_api_edit_reviewers_add_multiple(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {
+                "pull_request_id": pull_request_id,
+                "add": [ self.TEST_USER_LOGIN, base.TEST_USER_REGULAR2_LOGIN ]
+            },
+        }))
+        response = api_call(self, params)
+        # the result list order depends on Python's hash randomization, so compare as sets
+        assert set(ext_json.loads(response.body)['result']['added']) == set([base.TEST_USER_REGULAR2_LOGIN, self.TEST_USER_LOGIN])
+        assert set(ext_json.loads(response.body)['result']['already_present']) == set()
+        assert set(ext_json.loads(response.body)['result']['removed']) == set()
+
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(self.TEST_USER_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_add_already_present(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {
+                "pull_request_id": pull_request_id,
+                "add": [ base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR2_LOGIN ]
+            },
+        }))
+        response = api_call(self, params)
+        expected = { 'added': [base.TEST_USER_REGULAR2_LOGIN],
+                     'already_present': [base.TEST_USER_REGULAR_LOGIN],
+                     'removed': [],
+                   }
+
+        self._compare_ok(random_id, expected, given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_add_closed(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = self.test_user
+        PullRequestModel().close_pull_request(pull_request_id)
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "add": base.TEST_USER_REGULAR2_LOGIN},
+        }))
+        response = api_call(self, params)
+        self._compare_error(random_id, "Cannot edit reviewers of a closed pull request.", given=response.body)
+
+    def test_api_edit_reviewers_add_not_owner(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        pullrequest.owner = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "add": base.TEST_USER_REGULAR2_LOGIN},
+        }))
+        response = api_call(self, params)
+        self._compare_error(random_id, "No permission to edit reviewers of this pull request. User needs to be admin or pull request owner.", given=response.body)
+
+
+    def test_api_edit_reviewers_remove_single(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": base.TEST_USER_REGULAR_LOGIN},
+        }))
+        response = api_call(self, params)
+
+        expected = { 'added': [],
+                     'already_present': [],
+                     'removed': [base.TEST_USER_REGULAR_LOGIN],
+                   }
+        self._compare_ok(random_id, expected, given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) not in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_remove_nonexistent(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": 999},
+        }))
+        response = api_call(self, params)
+
+        self._compare_error(random_id, "user `999` does not exist", given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_remove_nonpresent(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) not in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": base.TEST_USER_REGULAR2_LOGIN},
+        }))
+        response = api_call(self, params)
+
+        # NOTE: the response gives no explicit indication that the removed user was never a reviewer
+        expected = { 'added': [],
+                     'already_present': [],
+                     'removed': [base.TEST_USER_REGULAR2_LOGIN],
+                   }
+        self._compare_ok(random_id, expected, given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) not in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_remove_multiple(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        prr = db.PullRequestReviewer(db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN), pullrequest)
+        meta.Session().add(prr)
+        meta.Session().commit()
+
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": [ base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR2_LOGIN ] },
+        }))
+        response = api_call(self, params)
+
+        # the result list order depends on Python's hash randomization, so compare as sets
+        assert set(ext_json.loads(response.body)['result']['added']) == set()
+        assert set(ext_json.loads(response.body)['result']['already_present']) == set()
+        assert set(ext_json.loads(response.body)['result']['removed']) == set([base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR2_LOGIN])
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) not in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) not in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_remove_closed(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        PullRequestModel().close_pull_request(pull_request_id)
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": base.TEST_USER_REGULAR_LOGIN},
+        }))
+        response = api_call(self, params)
+
+        self._compare_error(random_id, "Cannot edit reviewers of a closed pull request.", given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_remove_not_owner(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id, "remove": base.TEST_USER_REGULAR_LOGIN},
+        }))
+        response = api_call(self, params)
+
+        self._compare_error(random_id, "No permission to edit reviewers of this pull request. User needs to be admin or pull request owner.", given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_add_remove_single(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) not in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id,
+                     "add": base.TEST_USER_REGULAR2_LOGIN,
+                     "remove": base.TEST_USER_REGULAR_LOGIN
+                    },
+        }))
+        response = api_call(self, params)
+
+        expected = { 'added': [base.TEST_USER_REGULAR2_LOGIN],
+                     'already_present': [],
+                     'removed': [base.TEST_USER_REGULAR_LOGIN],
+                   }
+        self._compare_ok(random_id, expected, given=response.body)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) not in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_add_remove_multiple(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        prr = db.PullRequestReviewer(db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN), pullrequest)
+        meta.Session().add(prr)
+        meta.Session().commit()
+        assert db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) not in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = self.test_user
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id,
+                     "add": [ base.TEST_USER_REGULAR2_LOGIN ],
+                     "remove": [ base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_ADMIN_LOGIN ],
+                    },
+        }))
+        response = api_call(self, params)
+
+        # the result list order depends on Python's hash randomization, so compare as sets
+        assert set(ext_json.loads(response.body)['result']['added']) == set([base.TEST_USER_REGULAR2_LOGIN])
+        assert set(ext_json.loads(response.body)['result']['already_present']) == set()
+        assert set(ext_json.loads(response.body)['result']['removed']) == set([base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_ADMIN_LOGIN])
+        assert db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN) not in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) not in pullrequest.get_reviewer_users()
+        assert db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN) in pullrequest.get_reviewer_users()
+
+    def test_api_edit_reviewers_invalid_params(self):
+        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'edit reviewer test')
+        pullrequest = db.PullRequest().get(pull_request_id)
+        assert db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) in pullrequest.get_reviewer_users()
+
+        pullrequest.owner = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        random_id = random.randrange(1, 9999)
+        params = ascii_bytes(ext_json.dumps({
+            "id": random_id,
+            "api_key": self.apikey_regular,
+            "method": "edit_reviewers",
+            "args": {"pull_request_id": pull_request_id},
+        }))
+        response = api_call(self, params)
+
+        self._compare_error(random_id, "Invalid request. Neither 'add' nor 'remove' is specified.", given=response.body)
+        assert ext_json.loads(response.body)['result'] is None
--- a/kallithea/tests/base.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/base.py	Thu May 27 21:27:37 2021 +0200
@@ -20,10 +20,11 @@
 import time
 
 import pytest
+from beaker.cache import cache_managers
 from webtest import TestApp
 
 from kallithea.lib.utils2 import ascii_str
-from kallithea.model.db import User
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -126,7 +127,6 @@
     effect immediately.
     Note: Any use of this function is probably a workaround - it should be
     replaced with a more specific cache invalidation in code or test."""
-    from beaker.cache import cache_managers
     for cache in cache_managers.values():
         cache.clear()
 
@@ -138,6 +138,8 @@
 
 class TestController(object):
     """Pytest-style test controller"""
+    app: TestApp  # assigned by app_fixture
+    _logged_username: str  # assigned by log_user
 
     # Note: pytest base classes cannot have an __init__ method
 
@@ -166,12 +168,12 @@
         return response.session['authuser']
 
     def _get_logged_user(self):
-        return User.get_by_username(self._logged_username)
+        return db.User.get_by_username(self._logged_username)
 
     def assert_authenticated_user(self, response, expected_username):
         cookie = response.session.get('authuser')
         user = cookie and cookie.get('user_id')
-        user = user and User.get(user)
+        user = user and db.User.get(user)
         user = user and user.username
         assert user == expected_username
 
--- a/kallithea/tests/conftest.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/conftest.py	Thu May 27 21:27:37 2021 +0200
@@ -15,11 +15,11 @@
 from kallithea.controllers.root import RootController
 from kallithea.lib import inifile
 from kallithea.lib.utils import repo2db_mapper
-from kallithea.model.db import Setting, User, UserIpMap
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.scm import ScmModel
 from kallithea.model.user import UserModel
 from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH, invalidate_all_caches
+from kallithea.tests.fixture import create_test_env, create_test_index
 
 
 def pytest_configure():
@@ -59,18 +59,21 @@
             'formatter': 'color_formatter_sql',
         },
     }
-    if os.environ.get('TEST_DB'):
-        ini_settings['[app:main]']['sqlalchemy.url'] = os.environ.get('TEST_DB')
+    create_database = os.environ.get('TEST_DB')  # TODO: rename to 'CREATE_TEST_DB'
+    if create_database:
+        ini_settings['[app:main]']['sqlalchemy.url'] = create_database
+    reuse_database = os.environ.get('REUSE_TEST_DB')
+    if reuse_database:
+        ini_settings['[app:main]']['sqlalchemy.url'] = reuse_database
 
     test_ini_file = os.path.join(TESTS_TMP_PATH, 'test.ini')
     inifile.create(test_ini_file, None, ini_settings)
 
     context = loadwsgi.loadcontext(loadwsgi.APP, 'config:%s' % test_ini_file)
-    from kallithea.tests.fixture import create_test_env, create_test_index
 
     # set KALLITHEA_NO_TMP_PATH=1 to disable re-creating the database and test repos
     if not int(os.environ.get('KALLITHEA_NO_TMP_PATH', 0)):
-        create_test_env(TESTS_TMP_PATH, context.config())
+        create_test_env(TESTS_TMP_PATH, context.config(), reuse_database=bool(reuse_database))
 
     # set KALLITHEA_WHOOSH_TEST_DISABLE=1 to disable whoosh index during tests
     if not int(os.environ.get('KALLITHEA_WHOOSH_TEST_DISABLE', 0)):
@@ -100,17 +103,17 @@
     yield _create_test_user
     for user_id in test_user_ids:
         UserModel().delete(user_id)
-    Session().commit()
+    meta.Session().commit()
 
 
 def _set_settings(*kvtseq):
-    session = Session()
+    session = meta.Session()
     for kvt in kvtseq:
         assert len(kvt) in (2, 3)
         k = kvt[0]
         v = kvt[1]
         t = kvt[2] if len(kvt) == 3 else 'unicode'
-        Setting.create_or_update(k, v, t)
+        db.Setting.create_or_update(k, v, t)
     session.commit()
 
 
@@ -120,18 +123,18 @@
     # Save settings.
     settings_snapshot = [
         (s.app_settings_name, s.app_settings_value, s.app_settings_type)
-        for s in Setting.query().all()]
+        for s in db.Setting.query().all()]
     yield _set_settings
     # Restore settings.
-    session = Session()
+    session = meta.Session()
     keys = frozenset(k for (k, v, t) in settings_snapshot)
-    for s in Setting.query().all():
+    for s in db.Setting.query().all():
         if s.app_settings_name not in keys:
             session.delete(s)
     for k, v, t in settings_snapshot:
         if t == 'list' and hasattr(v, '__iter__'):
             v = ','.join(v) # Quirk: must format list value manually.
-        Setting.create_or_update(k, v, t)
+        db.Setting.create_or_update(k, v, t)
     session.commit()
 
 
@@ -146,15 +149,15 @@
 
     user_ids = []
     user_ids.append(kallithea.DEFAULT_USER_ID)
-    user_ids.append(User.get_by_username(TEST_USER_REGULAR_LOGIN).user_id)
+    user_ids.append(db.User.get_by_username(TEST_USER_REGULAR_LOGIN).user_id)
 
     for user_id in user_ids:
-        for ip in UserIpMap.query().filter(UserIpMap.user_id == user_id):
+        for ip in db.UserIpMap.query().filter(db.UserIpMap.user_id == user_id):
             user_model.delete_extra_ip(user_id, ip.ip_id)
 
     # IP permissions are cached, need to invalidate this cache explicitly
     invalidate_all_caches()
-    session = Session()
+    session = meta.Session()
     session.commit()
 
 
--- a/kallithea/tests/fixture.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/fixture.py	Thu May 27 21:27:37 2021 +0200
@@ -22,27 +22,27 @@
 import tarfile
 from os.path import dirname
 
-import mock
 from tg import request
 from tg.util.webtest import test_context
 
-from kallithea.lib import helpers
 from kallithea.lib.auth import AuthUser
 from kallithea.lib.db_manage import DbManage
+from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
+from kallithea.lib.pidlock import DaemonLock
 from kallithea.lib.vcs.backends.base import EmptyChangeset
+from kallithea.model import db, meta
 from kallithea.model.changeset_status import ChangesetStatusModel
 from kallithea.model.comment import ChangesetCommentsModel
-from kallithea.model.db import ChangesetStatus, Gist, RepoGroup, Repository, User, UserGroup
 from kallithea.model.gist import GistModel
-from kallithea.model.meta import Session
 from kallithea.model.pull_request import CreatePullRequestAction  # , CreatePullRequestIterationAction, PullRequestModel
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.scm import ScmModel
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
-from kallithea.tests.base import (GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH,
-                                  invalidate_all_caches)
+from kallithea.tests.base import (GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, TEST_USER_REGULAR2_EMAIL,
+                                  TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
+                                  TESTS_TMP_PATH, invalidate_all_caches)
 
 
 log = logging.getLogger(__name__)
@@ -50,8 +50,8 @@
 FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'tests', 'fixtures')
 
 
-def error_function(*args, **kwargs):
-    raise Exception('Total Crash !')
+def raise_exception(*args, **kwargs):
+    raise Exception('raise_exception raised exception')
 
 
 class Fixture(object):
@@ -73,16 +73,16 @@
 
         class context(object):
             def __enter__(self):
-                anon = User.get_default_user()
+                anon = db.User.get_default_user()
                 self._before = anon.active
                 anon.active = status
-                Session().commit()
+                meta.Session().commit()
                 invalidate_all_caches()
 
             def __exit__(self, exc_type, exc_val, exc_tb):
-                anon = User.get_default_user()
+                anon = db.User.get_default_user()
                 anon.active = self._before
-                Session().commit()
+                meta.Session().commit()
 
         return context()
 
@@ -97,7 +97,7 @@
             repo_private=False,
             repo_landing_rev='rev:tip',
             repo_copy_permissions=False,
-            repo_state=Repository.STATE_CREATED,
+            repo_state=db.Repository.STATE_CREATED,
         )
         defs.update(custom)
         if 'repo_name_full' not in custom:
@@ -150,29 +150,28 @@
 
         return defs
 
-    def create_repo(self, name, repo_group=None, **kwargs):
+    def create_repo(self, name, repo_group=None, cur_user=TEST_USER_ADMIN_LOGIN, **kwargs):
         if 'skip_if_exists' in kwargs:
             del kwargs['skip_if_exists']
-            r = Repository.get_by_repo_name(name)
+            r = db.Repository.get_by_repo_name(name)
             if r:
                 return r
 
-        if isinstance(repo_group, RepoGroup):
+        if isinstance(repo_group, db.RepoGroup):
             repo_group = repo_group.group_id
 
         form_data = self._get_repo_create_params(repo_name=name, **kwargs)
         form_data['repo_group'] = repo_group # patch form dict so it can be used directly by model
-        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
-        RepoModel().create(form_data, cur_user)
-        Session().commit()
+        RepoModel().create(form_data, cur_user=cur_user)
+        meta.Session().commit()
         ScmModel().mark_for_invalidation(name)
-        return Repository.get_by_repo_name(name)
+        return db.Repository.get_by_repo_name(name)
 
-    def create_fork(self, repo_to_fork, fork_name, **kwargs):
-        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
+    def create_fork(self, repo_to_fork, fork_name, cur_user=TEST_USER_ADMIN_LOGIN, **kwargs):
+        repo_to_fork = db.Repository.get_by_repo_name(repo_to_fork)
 
         form_data = self._get_repo_create_params(repo_name=fork_name,
-                                            fork_parent_id=repo_to_fork,
+                                            fork_parent_id=repo_to_fork.repo_id,
                                             repo_type=repo_to_fork.repo_type,
                                             **kwargs)
         # patch form dict so it can be used directly by model
@@ -180,23 +179,22 @@
         form_data['private'] = form_data['repo_private']
         form_data['landing_rev'] = form_data['repo_landing_rev']
 
-        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
-        RepoModel().create_fork(form_data, cur_user=owner)
-        Session().commit()
+        RepoModel().create_fork(form_data, cur_user=cur_user)
+        meta.Session().commit()
         ScmModel().mark_for_invalidation(fork_name)
-        r = Repository.get_by_repo_name(fork_name)
+        r = db.Repository.get_by_repo_name(fork_name)
         assert r
         return r
 
     def destroy_repo(self, repo_name, **kwargs):
         RepoModel().delete(repo_name, **kwargs)
-        Session().commit()
+        meta.Session().commit()
 
-    def create_repo_group(self, name, parent_group_id=None, **kwargs):
+    def create_repo_group(self, name, parent_group_id=None, cur_user=TEST_USER_ADMIN_LOGIN, **kwargs):
         assert '/' not in name, (name, kwargs) # use group_parent_id to make nested groups
         if 'skip_if_exists' in kwargs:
             del kwargs['skip_if_exists']
-            gr = RepoGroup.get_by_group_name(group_name=name)
+            gr = db.RepoGroup.get_by_group_name(group_name=name)
             if gr:
                 return gr
         form_data = self._get_repo_group_create_params(group_name=name, **kwargs)
@@ -204,58 +202,58 @@
             group_name=form_data['group_name'],
             group_description=form_data['group_name'],
             parent=parent_group_id,
-            owner=kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN),
+            owner=cur_user,
             )
-        Session().commit()
-        gr = RepoGroup.get_by_group_name(gr.group_name)
+        meta.Session().commit()
+        gr = db.RepoGroup.get_by_group_name(gr.group_name)
         return gr
 
     def destroy_repo_group(self, repogroupid):
         RepoGroupModel().delete(repogroupid)
-        Session().commit()
+        meta.Session().commit()
 
     def create_user(self, name, **kwargs):
         if 'skip_if_exists' in kwargs:
             del kwargs['skip_if_exists']
-            user = User.get_by_username(name)
+            user = db.User.get_by_username(name)
             if user:
                 return user
         form_data = self._get_user_create_params(name, **kwargs)
         user = UserModel().create(form_data)
-        Session().commit()
-        user = User.get_by_username(user.username)
+        meta.Session().commit()
+        user = db.User.get_by_username(user.username)
         return user
 
     def destroy_user(self, userid):
         UserModel().delete(userid)
-        Session().commit()
+        meta.Session().commit()
 
-    def create_user_group(self, name, **kwargs):
+    def create_user_group(self, name, cur_user=TEST_USER_ADMIN_LOGIN, **kwargs):
         if 'skip_if_exists' in kwargs:
             del kwargs['skip_if_exists']
-            gr = UserGroup.get_by_group_name(group_name=name)
+            gr = db.UserGroup.get_by_group_name(group_name=name)
             if gr:
                 return gr
         form_data = self._get_user_group_create_params(name, **kwargs)
-        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
         user_group = UserGroupModel().create(
             name=form_data['users_group_name'],
             description=form_data['user_group_description'],
-            owner=owner, active=form_data['users_group_active'],
+            owner=cur_user,
+            active=form_data['users_group_active'],
             group_data=form_data['user_group_data'])
-        Session().commit()
-        user_group = UserGroup.get_by_group_name(user_group.users_group_name)
+        meta.Session().commit()
+        user_group = db.UserGroup.get_by_group_name(user_group.users_group_name)
         return user_group
 
     def destroy_user_group(self, usergroupid):
         UserGroupModel().delete(user_group=usergroupid, force=True)
-        Session().commit()
+        meta.Session().commit()
 
     def create_gist(self, **kwargs):
         form_data = {
             'description': 'new-gist',
             'owner': TEST_USER_ADMIN_LOGIN,
-            'gist_type': Gist.GIST_PUBLIC,
+            'gist_type': db.Gist.GIST_PUBLIC,
             'lifetime': -1,
             'gist_mapping': {'filename1.txt': {'content': 'hello world'}}
         }
@@ -265,18 +263,18 @@
             gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
             lifetime=form_data['lifetime']
         )
-        Session().commit()
+        meta.Session().commit()
 
         return gist
 
     def destroy_gists(self, gistid=None):
-        for g in Gist.query():
+        for g in db.Gist.query():
             if gistid:
                 if gistid == g.gist_access_id:
                     GistModel().delete(g)
             else:
                 GistModel().delete(g)
-        Session().commit()
+        meta.Session().commit()
 
     def load_resource(self, resource_name, strip=True):
         with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
@@ -288,7 +286,7 @@
 
     def commit_change(self, repo, filename, content, message, vcs_type,
                       parent=None, newfile=False, author=None):
-        repo = Repository.get_by_repo_name(repo)
+        repo = db.Repository.get_by_repo_name(repo)
         _cs = parent
         if parent is None:
             _cs = EmptyChangeset(alias=vcs_type)
@@ -325,23 +323,23 @@
 
     def review_changeset(self, repo, revision, status, author=TEST_USER_ADMIN_LOGIN):
         comment = ChangesetCommentsModel().create("review comment", repo, author, revision=revision, send_email=False)
-        csm = ChangesetStatusModel().set_status(repo, ChangesetStatus.STATUS_APPROVED, author, comment, revision=revision)
-        Session().commit()
+        csm = ChangesetStatusModel().set_status(repo, db.ChangesetStatus.STATUS_APPROVED, author, comment, revision=revision)
+        meta.Session().commit()
         return csm
 
     def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title='title'):
         org_ref = 'branch:stable:%s' % pr_src_rev
         other_ref = 'branch:default:%s' % pr_dst_rev
-        with test_context(testcontroller.app): # needed to be able to mock request user
-            org_repo = other_repo = Repository.get_by_repo_name(repo_name)
-            owner_user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
-            reviewers = [User.get_by_username(TEST_USER_REGULAR_LOGIN)]
+        with test_context(testcontroller.app): # needed to be able to mock request user and routes.url
+            org_repo = other_repo = db.Repository.get_by_repo_name(repo_name)
+            owner_user = db.User.get_by_username(TEST_USER_ADMIN_LOGIN)
+            reviewers = [db.User.get_by_username(TEST_USER_REGULAR_LOGIN)]
             request.authuser = AuthUser(dbuser=owner_user)
             # creating a PR sends a message with an absolute URL - without routing that requires mocking
-            with mock.patch.object(helpers, 'url', (lambda arg, qualified=False, **kwargs: ('https://localhost' if qualified else '') + '/fake/' + arg)):
-                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, 'No description', owner_user, reviewers)
-                pull_request = cmd.execute()
-            Session().commit()
+            request.environ['routes.url'] = lambda arg, qualified=False, **kwargs: ('https://localhost' if qualified else '') + '/fake/' + arg
+            cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, 'No description', owner_user, reviewers)
+            pull_request = cmd.execute()
+            meta.Session().commit()
         return pull_request.pull_request_id
 
 
@@ -349,7 +347,7 @@
 # Global test environment setup
 #==============================================================================
 
-def create_test_env(repos_test_path, config):
+def create_test_env(repos_test_path, config, reuse_database):
     """
     Makes a fresh database and
     install test repository into tmp dir
@@ -365,15 +363,22 @@
         os.makedirs(repos_test_path)
 
     dbmanage = DbManage(dbconf=dbconf, root=config['here'],
-                        tests=True)
-    dbmanage.create_tables(override=True)
+                        cli_args={
+                            'force_ask': True,
+                            'username': TEST_USER_ADMIN_LOGIN,
+                            'password': TEST_USER_ADMIN_PASS,
+                            'email': TEST_USER_ADMIN_EMAIL,
+                        })
+    dbmanage.create_tables(reuse_database=reuse_database)
     # for tests dynamically set new root paths based on generated content
     dbmanage.create_settings(dbmanage.prompt_repo_root_path(repos_test_path))
     dbmanage.create_default_user()
-    dbmanage.admin_prompt()
+    dbmanage.create_admin_user()
+    dbmanage.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, TEST_USER_REGULAR_EMAIL, False)
+    dbmanage.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL, False)
     dbmanage.create_permissions()
     dbmanage.populate_default_permissions()
-    Session().commit()
+    meta.Session().commit()
     # PART TWO make test repo
     log.debug('making test vcs repositories')
 
@@ -398,19 +403,12 @@
     tar.extractall(os.path.join(TESTS_TMP_PATH, GIT_REPO))
     tar.close()
 
-    # LOAD VCS test stuff
-    from kallithea.tests.vcs import setup_package
-    setup_package()
-
 
 def create_test_index(repo_location, config, full_index):
     """
     Makes default test index
     """
 
-    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
-    from kallithea.lib.pidlock import DaemonLock
-
     index_location = os.path.join(config['index_dir'])
     if not os.path.exists(index_location):
         os.makedirs(index_location)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/tests/fixtures/git_diff_quoting.diff	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,53 @@
+diff --git "a/\"foo\"" "b/\"foo\""
+new file mode 100644
+index 0000000..8b13789
+--- /dev/null
++++ "b/\"foo\""
+@@ -0,0 +1 @@
++
+diff --git a/'foo' b/'foo'
+new file mode 100644
+index 0000000..8b13789
+--- /dev/null
++++ b/'foo'
+@@ -0,0 +1 @@
++
+diff --git "a/'foo'\"foo\"" "b/'foo'\"foo\""
+new file mode 100644
+index 0000000..8b13789
+--- /dev/null
++++ "b/'foo'\"foo\""
+@@ -0,0 +1 @@
++
+diff --git "a/a\r\nb" "b/a\r\nb"
+new file mode 100644
+index 0000000..30d74d2
+--- /dev/null
++++ "b/a\r\nb"
+@@ -0,0 +1 @@
++test
+\ No newline at end of file
+diff --git "a/foo\rfoo" "b/foo\rfoo"
+new file mode 100644
+index 0000000..e69de29
+diff --git a/foo bar b/foo bar
+new file mode 100644
+index 0000000..219ea2b
+--- /dev/null
++++ b/foo bar	
+@@ -0,0 +1 @@
++foo  bar
+\ No newline at end of file
+diff --git a/test b/test
+new file mode 100644
+index 0000000..9daeafb
+--- /dev/null
++++ b/test
+@@ -0,0 +1 @@
++test
+diff --git "a/esc\033foo" "b/esc\033foo"
+new file mode 100644
+index 0000000..e69de29
+diff --git "a/tab\tfoo" "b/tab\tfoo"
+new file mode 100644
+index 0000000..e69de29
--- a/kallithea/tests/fixtures/markuptest.diff	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/fixtures/markuptest.diff	Thu May 27 21:27:37 2021 +0200
@@ -1,7 +1,7 @@
 diff --git a/f b/f
 --- a/f	
 +++ b/f	
-@@ -51,6 +51,13 @@
+@@ -51,8 +51,15 @@
  	begin();
  	
 +	int foo;
@@ -17,3 +17,6 @@
  
 -	#define MIN_STEPS (48)
 +	#define MIN_STEPS (42)
+ 
+-	#define MORE_STEPS	+(48)	
++	#define LESS_STEPS	(42) 
--- a/kallithea/tests/functional/test_admin.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin.py	Thu May 27 21:27:37 2021 +0200
@@ -3,8 +3,7 @@
 import os
 from os.path import dirname
 
-from kallithea.model.db import UserLog
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.tests import base
 
 
@@ -15,8 +14,8 @@
 
     @classmethod
     def setup_class(cls):
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
         def strptime(val):
             fmt = '%Y-%m-%d %H:%M:%S'
@@ -32,7 +31,7 @@
 
         with open(os.path.join(FIXTURES, 'journal_dump.csv')) as f:
             for row in csv.DictReader(f):
-                ul = UserLog()
+                ul = db.UserLog()
                 for k, v in row.items():
                     if k == 'action_date':
                         v = strptime(v)
@@ -40,13 +39,13 @@
                         # nullable due to FK problems
                         v = None
                     setattr(ul, k, v)
-                Session().add(ul)
-            Session().commit()
+                meta.Session().add(ul)
+            meta.Session().commit()
 
     @classmethod
     def teardown_class(cls):
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
     def test_index(self):
         self.log_user()
--- a/kallithea/tests/functional/test_admin_auth_settings.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_auth_settings.py	Thu May 27 21:27:37 2021 +0200
@@ -1,4 +1,4 @@
-from kallithea.model.db import Setting
+from kallithea.model import db
 from kallithea.tests import base
 
 
@@ -47,7 +47,7 @@
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
-        new_settings = Setting.get_auth_settings()
+        new_settings = db.Setting.get_auth_settings()
         assert new_settings['auth_ldap_host'] == 'dc.example.com', 'fail db write compare'
 
     @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
@@ -238,7 +238,7 @@
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
-        new_settings = Setting.get_auth_settings()
+        new_settings = db.Setting.get_auth_settings()
         assert new_settings['auth_crowd_host'] == 'hostname', 'fail db write compare'
 
     @base.skipif(not base.pam_lib_installed, reason='skipping due to missing pam lib')
@@ -255,5 +255,5 @@
         response = self.app.post(url=test_url, params=params)
         self.checkSessionFlash(response, 'Auth settings updated successfully')
 
-        new_settings = Setting.get_auth_settings()
+        new_settings = db.Setting.get_auth_settings()
         assert new_settings['auth_pam_service'] == 'kallithea', 'fail db write compare'
--- a/kallithea/tests/functional/test_admin_defaults.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_defaults.py	Thu May 27 21:27:37 2021 +0200
@@ -1,4 +1,4 @@
-from kallithea.model.db import Setting
+from kallithea.model import db
 from kallithea.tests import base
 
 
@@ -24,7 +24,7 @@
         self.checkSessionFlash(response, 'Default settings updated successfully')
 
         params.pop('_session_csrf_secret_token')
-        defs = Setting.get_default_repo_settings()
+        defs = db.Setting.get_default_repo_settings()
         assert params == defs
 
     def test_update_params_false_git(self):
@@ -40,5 +40,5 @@
         self.checkSessionFlash(response, 'Default settings updated successfully')
 
         params.pop('_session_csrf_secret_token')
-        defs = Setting.get_default_repo_settings()
+        defs = db.Setting.get_default_repo_settings()
         assert params == defs
--- a/kallithea/tests/functional/test_admin_gists.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_gists.py	Thu May 27 21:27:37 2021 +0200
@@ -1,6 +1,5 @@
-from kallithea.model.db import Gist, User
+from kallithea.model import db, meta
 from kallithea.model.gist import GistModel
-from kallithea.model.meta import Session
 from kallithea.tests import base
 
 
@@ -10,20 +9,20 @@
     gist_mapping = {
         f_name: {'content': content}
     }
-    owner = User.get_by_username(owner)
+    owner = db.User.get_by_username(owner)
     gist = GistModel().create(description, owner=owner, ip_addr=base.IP_ADDR,
                        gist_mapping=gist_mapping, gist_type=gist_type,
                        lifetime=lifetime)
-    Session().commit()
+    meta.Session().commit()
     return gist
 
 
 class TestGistsController(base.TestController):
 
     def teardown_method(self, method):
-        for g in Gist.query():
+        for g in db.Gist.query():
             GistModel().delete(g)
-        Session().commit()
+        meta.Session().commit()
 
     def test_index(self):
         self.log_user()
@@ -90,7 +89,7 @@
         self.log_user()
         gist = _create_gist('never-see-me')
         gist.gist_expires = 0  # 1970
-        Session().commit()
+        meta.Session().commit()
 
         response = self.app.get(base.url('gist', gist_id=gist.gist_access_id), status=404)
 
--- a/kallithea/tests/functional/test_admin_permissions.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_permissions.py	Thu May 27 21:27:37 2021 +0200
@@ -1,5 +1,5 @@
 import kallithea
-from kallithea.model.db import User, UserIpMap
+from kallithea.model import db
 from kallithea.tests import base
 
 
@@ -46,7 +46,7 @@
 
         # Delete latest IP and verify same IP is rejected again
 
-        x = UserIpMap.query().filter_by(ip_addr='0.0.1.0/24').first()
+        x = db.UserIpMap.query().filter_by(ip_addr='0.0.1.0/24').first()
         response = self.app.post(base.url('edit_user_ips_delete', id=default_user_id),
                                  params=dict(del_ip_id=x.ip_id,
                                              _session_csrf_secret_token=self.session_csrf_secret_token()))
@@ -57,7 +57,7 @@
 
         # Delete first IP and verify unlimited access again
 
-        x = UserIpMap.query().filter_by(ip_addr='0.0.0.0/24').first()
+        x = db.UserIpMap.query().filter_by(ip_addr='0.0.0.0/24').first()
         response = self.app.post(base.url('edit_user_ips_delete', id=default_user_id),
                                  params=dict(del_ip_id=x.ip_id,
                                              _session_csrf_secret_token=self.session_csrf_secret_token()))
@@ -72,7 +72,7 @@
         # Test response...
 
     def test_edit_permissions_permissions(self):
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         # Test unauthenticated access - it will redirect to login page
         response = self.app.post(
--- a/kallithea/tests/functional/test_admin_repo_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_repo_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -1,4 +1,4 @@
-from kallithea.model.meta import Session
+from kallithea.model import meta
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests.base import TestController, url
 from kallithea.tests.fixture import Fixture
@@ -23,4 +23,4 @@
         response.mustcontain('already exists')
 
         RepoGroupModel().delete(group_name)
-        Session().commit()
+        meta.Session().commit()
--- a/kallithea/tests/functional/test_admin_repos.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_repos.py	Thu May 27 21:27:37 2021 +0200
@@ -6,25 +6,24 @@
 import mock
 import pytest
 
+import kallithea
 from kallithea.lib import vcs
-from kallithea.model import db
-from kallithea.model.db import Permission, Repository, Ui, User, UserRepoToPerm
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user import UserModel
 from kallithea.tests import base
-from kallithea.tests.fixture import Fixture, error_function
+from kallithea.tests.fixture import Fixture, raise_exception
 
 
 fixture = Fixture()
 
 
 def _get_permission_for_user(user, repo):
-    perm = UserRepoToPerm.query() \
-                .filter(UserRepoToPerm.repository ==
-                        Repository.get_by_repo_name(repo)) \
-                .filter(UserRepoToPerm.user == User.get_by_username(user)) \
+    perm = db.UserRepoToPerm.query() \
+                .filter(db.UserRepoToPerm.repository ==
+                        db.Repository.get_by_repo_name(repo)) \
+                .filter(db.UserRepoToPerm.user == db.User.get_by_username(user)) \
                 .all()
     return perm
 
@@ -61,8 +60,8 @@
                                % (repo_name, repo_name))
 
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name).one()
 
         assert new_repo.repo_name == repo_name
         assert new_repo.description == description
@@ -74,12 +73,12 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
         RepoModel().delete(repo_name)
-        Session().commit()
+        meta.Session().commit()
 
     def test_case_insensitivity(self):
         self.log_user()
@@ -102,7 +101,7 @@
         response.mustcontain('already exists')
 
         RepoModel().delete(repo_name)
-        Session().commit()
+        meta.Session().commit()
 
     def test_create_in_group(self):
         self.log_user()
@@ -112,10 +111,10 @@
         gr = RepoGroupModel().create(group_name=group_name,
                                      group_description='test',
                                      owner=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
 
         repo_name = 'ingroup'
-        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        repo_name_full = kallithea.URL_SEP.join([group_name, repo_name])
         description = 'description for newly created repo'
         response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
@@ -131,8 +130,8 @@
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name_full).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name_full).one()
         new_repo_id = new_repo.repo_id
 
         assert new_repo.repo_name == repo_name_full
@@ -143,21 +142,21 @@
         response.mustcontain(repo_name_full)
         response.mustcontain(self.REPO_TYPE)
 
-        inherited_perms = UserRepoToPerm.query() \
-            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
+        inherited_perms = db.UserRepoToPerm.query() \
+            .filter(db.UserRepoToPerm.repository_id == new_repo_id).all()
         assert len(inherited_perms) == 1
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
-            Session().commit()
+            meta.Session().commit()
             pytest.fail('no repo %s in filesystem' % repo_name)
 
         RepoModel().delete(repo_name_full)
         RepoGroupModel().delete(group_name)
-        Session().commit()
+        meta.Session().commit()
 
     def test_create_in_group_without_needed_permissions(self):
         usr = self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
@@ -166,33 +165,33 @@
         # revoke
         user_model = UserModel()
         # disable fork and create on default user
-        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.create.repository')
-        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.create.none')
-        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.fork.repository')
-        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.fork.none')
+        user_model.revoke_perm(db.User.DEFAULT_USER_NAME, 'hg.create.repository')
+        user_model.grant_perm(db.User.DEFAULT_USER_NAME, 'hg.create.none')
+        user_model.revoke_perm(db.User.DEFAULT_USER_NAME, 'hg.fork.repository')
+        user_model.grant_perm(db.User.DEFAULT_USER_NAME, 'hg.fork.none')
 
         # disable on regular user
         user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
         user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.none')
         user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
         user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
-        Session().commit()
+        meta.Session().commit()
 
         ## create GROUP
         group_name = 'reg_sometest_%s' % self.REPO_TYPE
         gr = RepoGroupModel().create(group_name=group_name,
                                      group_description='test',
                                      owner=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
 
         group_name_allowed = 'reg_sometest_allowed_%s' % self.REPO_TYPE
         gr_allowed = RepoGroupModel().create(group_name=group_name_allowed,
                                      group_description='test',
                                      owner=base.TEST_USER_REGULAR_LOGIN)
-        Session().commit()
+        meta.Session().commit()
 
         repo_name = 'ingroup'
-        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        repo_name_full = kallithea.URL_SEP.join([group_name, repo_name])
         description = 'description for newly created repo'
         response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
@@ -206,7 +205,7 @@
 
         # user is allowed to create in this group
         repo_name = 'ingroup'
-        repo_name_full = db.URL_SEP.join([group_name_allowed, repo_name])
+        repo_name_full = kallithea.URL_SEP.join([group_name_allowed, repo_name])
         description = 'description for newly created repo'
         response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
@@ -223,8 +222,8 @@
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name_full).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name_full).one()
         new_repo_id = new_repo.repo_id
 
         assert new_repo.repo_name == repo_name_full
@@ -235,22 +234,22 @@
         response.mustcontain(repo_name_full)
         response.mustcontain(self.REPO_TYPE)
 
-        inherited_perms = UserRepoToPerm.query() \
-            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
+        inherited_perms = db.UserRepoToPerm.query() \
+            .filter(db.UserRepoToPerm.repository_id == new_repo_id).all()
         assert len(inherited_perms) == 1
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
-            Session().commit()
+            meta.Session().commit()
             pytest.fail('no repo %s in filesystem' % repo_name)
 
         RepoModel().delete(repo_name_full)
         RepoGroupModel().delete(group_name)
         RepoGroupModel().delete(group_name_allowed)
-        Session().commit()
+        meta.Session().commit()
 
     def test_create_in_group_inherit_permissions(self):
         self.log_user()
@@ -260,14 +259,14 @@
         gr = RepoGroupModel().create(group_name=group_name,
                                      group_description='test',
                                      owner=base.TEST_USER_ADMIN_LOGIN)
-        perm = Permission.get_by_key('repository.write')
+        perm = db.Permission.get_by_key('repository.write')
         RepoGroupModel().grant_user_permission(gr, base.TEST_USER_REGULAR_LOGIN, perm)
 
         ## add repo permissions
-        Session().commit()
+        meta.Session().commit()
 
         repo_name = 'ingroup_inherited_%s' % self.REPO_TYPE
-        repo_name_full = db.URL_SEP.join([group_name, repo_name])
+        repo_name_full = kallithea.URL_SEP.join([group_name, repo_name])
         description = 'description for newly created repo'
         response = self.app.post(base.url('repos'),
                         fixture._get_repo_create_params(repo_private=False,
@@ -284,8 +283,8 @@
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name_full, repo_name_full))
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name_full).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name_full).one()
         new_repo_id = new_repo.repo_id
 
         assert new_repo.repo_name == repo_name_full
@@ -298,15 +297,15 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name_full))
         except vcs.exceptions.VCSError:
             RepoGroupModel().delete(group_name)
-            Session().commit()
+            meta.Session().commit()
             pytest.fail('no repo %s in filesystem' % repo_name)
 
         # check if inherited permissions are applied
-        inherited_perms = UserRepoToPerm.query() \
-            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
+        inherited_perms = db.UserRepoToPerm.query() \
+            .filter(db.UserRepoToPerm.repository_id == new_repo_id).all()
         assert len(inherited_perms) == 2
 
         assert base.TEST_USER_REGULAR_LOGIN in [x.user.username
@@ -316,7 +315,7 @@
 
         RepoModel().delete(repo_name_full)
         RepoGroupModel().delete(group_name)
-        Session().commit()
+        meta.Session().commit()
 
     def test_create_remote_repo_wrong_clone_uri(self):
         self.log_user()
@@ -373,8 +372,8 @@
                                'Created repository <a href="/%s">%s</a>'
                                % (repo_name, repo_name))
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name).one()
 
         assert new_repo.repo_name == repo_name
         assert new_repo.description == description
@@ -386,7 +385,7 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
@@ -398,12 +397,12 @@
         response.follow()
 
         # check if repo was deleted from db
-        deleted_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name).scalar()
+        deleted_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name).scalar()
 
         assert deleted_repo is None
 
-        assert os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
+        assert os.path.isdir(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
 
     def test_delete_non_ascii(self):
         self.log_user()
@@ -423,8 +422,8 @@
                                'Created repository <a href="/%s">%s</a>'
                                % (urllib.parse.quote(repo_name), repo_name))
         # test if the repo was created in the database
-        new_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name).one()
+        new_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name).one()
 
         assert new_repo.repo_name == repo_name
         assert new_repo.description == description
@@ -436,7 +435,7 @@
 
         # test if the repository was created on filesystem
         try:
-            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
+            vcs.get_repo(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name))
         except vcs.exceptions.VCSError:
             pytest.fail('no repo %s in filesystem' % repo_name)
 
@@ -446,12 +445,12 @@
         response.follow()
 
         # check if repo was deleted from db
-        deleted_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == repo_name).scalar()
+        deleted_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == repo_name).scalar()
 
         assert deleted_repo is None
 
-        assert os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
+        assert os.path.isdir(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
 
     def test_delete_repo_with_group(self):
         # TODO:
@@ -474,7 +473,7 @@
         perm = _get_permission_for_user(user='default', repo=self.REPO)
         assert len(perm) == 1
         assert perm[0].permission.permission_name == 'repository.read'
-        assert Repository.get_by_repo_name(self.REPO).private == False
+        assert db.Repository.get_by_repo_name(self.REPO).private == False
 
         response = self.app.post(base.url('update_repo', repo_name=self.REPO),
                         fixture._get_repo_create_params(repo_private=1,
@@ -484,7 +483,7 @@
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
                                msg='Repository %s updated successfully' % (self.REPO))
-        assert Repository.get_by_repo_name(self.REPO).private == True
+        assert db.Repository.get_by_repo_name(self.REPO).private == True
 
         # now the repo default permission should be None
         perm = _get_permission_for_user(user='default', repo=self.REPO)
@@ -499,7 +498,7 @@
                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
                                msg='Repository %s updated successfully' % (self.REPO))
-        assert Repository.get_by_repo_name(self.REPO).private == False
+        assert db.Repository.get_by_repo_name(self.REPO).private == False
 
         # we turn off private now the repo default permission should stay None
         perm = _get_permission_for_user(user='default', repo=self.REPO)
@@ -507,12 +506,12 @@
         assert perm[0].permission.permission_name == 'repository.none'
 
         # update this permission back
-        perm[0].permission = Permission.get_by_key('repository.read')
-        Session().commit()
+        perm[0].permission = db.Permission.get_by_key('repository.read')
+        meta.Session().commit()
 
     def test_set_repo_fork_has_no_self_id(self):
         self.log_user()
-        repo = Repository.get_by_repo_name(self.REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
         response = self.app.get(base.url('edit_repo_advanced', repo_name=self.REPO))
         opt = """<option value="%s">%s</option>""" % (repo.repo_id, self.REPO)
         response.mustcontain(no=[opt])
@@ -521,12 +520,12 @@
         self.log_user()
         other_repo = 'other_%s' % self.REPO_TYPE
         fixture.create_repo(other_repo, repo_type=self.REPO_TYPE)
-        repo = Repository.get_by_repo_name(self.REPO)
-        repo2 = Repository.get_by_repo_name(other_repo)
+        repo = db.Repository.get_by_repo_name(self.REPO)
+        repo2 = db.Repository.get_by_repo_name(other_repo)
         response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo2.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
-        repo = Repository.get_by_repo_name(self.REPO)
-        repo2 = Repository.get_by_repo_name(other_repo)
+        repo = db.Repository.get_by_repo_name(self.REPO)
+        repo2 = db.Repository.get_by_repo_name(other_repo)
         self.checkSessionFlash(response,
             'Marked repository %s as fork of %s' % (repo.repo_name, repo2.repo_name))
 
@@ -542,12 +541,12 @@
 
     def test_set_fork_of_other_type_repo(self):
         self.log_user()
-        repo = Repository.get_by_repo_name(self.REPO)
-        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
+        repo2 = db.Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
         response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo2.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
-        repo = Repository.get_by_repo_name(self.REPO)
-        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
+        repo2 = db.Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
         self.checkSessionFlash(response,
             'Cannot set repository as fork of repository with other type')
 
@@ -556,8 +555,8 @@
         ## mark it as None
         response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=None, _session_csrf_secret_token=self.session_csrf_secret_token()))
-        repo = Repository.get_by_repo_name(self.REPO)
-        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
+        repo2 = db.Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
         self.checkSessionFlash(response,
                                'Marked repository %s as fork of %s'
                                % (repo.repo_name, "Nothing"))
@@ -565,7 +564,7 @@
 
     def test_set_fork_of_same_repo(self):
         self.log_user()
-        repo = Repository.get_by_repo_name(self.REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
         response = self.app.post(base.url('edit_repo_advanced_fork', repo_name=self.REPO),
                                 params=dict(id_fork_of=repo.repo_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
         self.checkSessionFlash(response,
@@ -576,20 +575,20 @@
         # revoke
         user_model = UserModel()
         # disable fork and create on default user
-        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.create.repository')
-        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.create.none')
-        user_model.revoke_perm(User.DEFAULT_USER_NAME, 'hg.fork.repository')
-        user_model.grant_perm(User.DEFAULT_USER_NAME, 'hg.fork.none')
+        user_model.revoke_perm(db.User.DEFAULT_USER_NAME, 'hg.create.repository')
+        user_model.grant_perm(db.User.DEFAULT_USER_NAME, 'hg.create.none')
+        user_model.revoke_perm(db.User.DEFAULT_USER_NAME, 'hg.fork.repository')
+        user_model.grant_perm(db.User.DEFAULT_USER_NAME, 'hg.fork.none')
 
         # disable on regular user
         user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
         user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.none')
         user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
         user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
-        Session().commit()
+        meta.Session().commit()
 
 
-        user = User.get(usr['user_id'])
+        user = db.User.get(usr['user_id'])
 
         repo_name = self.NEW_REPO + 'no_perms'
         description = 'description for newly created repo'
@@ -603,9 +602,9 @@
         response.mustcontain('<span class="error-message">Invalid value</span>')
 
         RepoModel().delete(repo_name)
-        Session().commit()
+        meta.Session().commit()
 
-    @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
+    @mock.patch.object(RepoModel, '_create_filesystem_repo', raise_exception)
     def test_create_repo_when_filesystem_op_fails(self):
         self.log_user()
         repo_name = self.NEW_REPO
@@ -621,11 +620,11 @@
         self.checkSessionFlash(response,
                                'Error creating repository %s' % repo_name)
         # repo must not be in db
-        repo = Repository.get_by_repo_name(repo_name)
+        repo = db.Repository.get_by_repo_name(repo_name)
         assert repo is None
 
         # repo must not be in filesystem !
-        assert not os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
+        assert not os.path.isdir(os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo_name))
 
 
 class TestAdminReposControllerGIT(_BaseTestCase):
@@ -644,7 +643,7 @@
     OTHER_TYPE = 'git'
 
     def test_permanent_url_protocol_access(self):
-        repo = Repository.get_by_repo_name(self.REPO)
+        repo = db.Repository.get_by_repo_name(self.REPO)
         permanent_name = '_%d' % repo.repo_id
 
         # 400 Bad Request - Unable to detect pull/push action
--- a/kallithea/tests/functional/test_admin_settings.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_settings.py	Thu May 27 21:27:37 2021 +0200
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-from kallithea.model.db import Setting, Ui
+from kallithea.model import db
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
@@ -33,6 +33,7 @@
     def test_index_hooks(self):
         self.log_user()
         response = self.app.get(base.url('admin_settings_hooks'))
+        response.mustcontain(no='.kallithea_')
 
     def test_create_custom_hook(self):
         self.log_user()
@@ -80,7 +81,7 @@
         response.mustcontain('test_hooks_2')
         response.mustcontain('cd %s2' % base.TESTS_TMP_PATH)
 
-        hook_id = Ui.get_by_key('hooks', 'test_hooks_2').ui_id
+        hook_id = db.Ui.get_by_key('hooks', 'test_hooks_2').ui_id
         ## delete
         self.app.post(base.url('admin_settings_hooks'),
                         params=dict(hook_id=hook_id, _session_csrf_secret_token=self.session_csrf_secret_token()))
@@ -91,14 +92,11 @@
     def test_add_existing_builtin_hook(self):
         self.log_user()
         response = self.app.post(base.url('admin_settings_hooks'),
-                                params=dict(new_hook_ui_key='changegroup.update',
+                                params=dict(new_hook_ui_key='changegroup.kallithea_update',
                                             new_hook_ui_value='attempted_new_value',
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
 
-        self.checkSessionFlash(response, 'Builtin hooks are read-only')
-        response = response.follow()
-        response.mustcontain('changegroup.update')
-        response.mustcontain('hg update &gt;&amp;2')
+        self.checkSessionFlash(response, 'reserved for internal use')
 
     def test_index_search(self):
         self.log_user()
@@ -124,7 +122,7 @@
 
         self.checkSessionFlash(response, 'Updated application settings')
 
-        assert Setting.get_app_settings()['ga_code'] == new_ga_code
+        assert db.Setting.get_app_settings()['ga_code'] == new_ga_code
 
         response = response.follow()
         response.mustcontain("""_gaq.push(['_setAccount', '%s']);""" % new_ga_code)
@@ -144,7 +142,7 @@
                                  ))
 
         self.checkSessionFlash(response, 'Updated application settings')
-        assert Setting.get_app_settings()['ga_code'] == new_ga_code
+        assert db.Setting.get_app_settings()['ga_code'] == new_ga_code
 
         response = response.follow()
         response.mustcontain(no=["_gaq.push(['_setAccount', '%s']);" % new_ga_code])
@@ -164,7 +162,7 @@
                                  ))
 
         self.checkSessionFlash(response, 'Updated application settings')
-        assert Setting.get_app_settings()['captcha_private_key'] == '1234567890'
+        assert db.Setting.get_app_settings()['captcha_private_key'] == '1234567890'
 
         response = self.app.get(base.url('register'))
         response.mustcontain('captcha')
@@ -184,7 +182,7 @@
                                  ))
 
         self.checkSessionFlash(response, 'Updated application settings')
-        assert Setting.get_app_settings()['captcha_private_key'] == ''
+        assert db.Setting.get_app_settings()['captcha_private_key'] == ''
 
         response = self.app.get(base.url('register'))
         response.mustcontain(no=['captcha'])
@@ -206,7 +204,7 @@
                                 ))
 
             self.checkSessionFlash(response, 'Updated application settings')
-            assert Setting.get_app_settings()['title'] == new_title
+            assert db.Setting.get_app_settings()['title'] == new_title
 
             response = response.follow()
             response.mustcontain("""<span class="branding">%s</span>""" % new_title)
--- a/kallithea/tests/functional/test_admin_user_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_user_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
-from kallithea.model.db import Permission, UserGroup, UserGroupToPerm
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.tests import base
 
 
@@ -52,14 +51,14 @@
         self.checkSessionFlash(response,
                                'Created user group ')
 
-        gr = Session().query(UserGroup) \
-            .filter(UserGroup.users_group_name == users_group_name).one()
+        gr = meta.Session().query(db.UserGroup) \
+            .filter(db.UserGroup.users_group_name == users_group_name).one()
 
         response = self.app.post(base.url('delete_users_group', id=gr.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
-        gr = Session().query(UserGroup) \
-            .filter(UserGroup.users_group_name == users_group_name).scalar()
+        gr = meta.Session().query(db.UserGroup) \
+            .filter(db.UserGroup.users_group_name == users_group_name).scalar()
 
         assert gr is None
 
@@ -73,7 +72,7 @@
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
 
-        ug = UserGroup.get_by_group_name(users_group_name)
+        ug = db.UserGroup.get_by_group_name(users_group_name)
         self.checkSessionFlash(response,
                                'Created user group ')
         ## ENABLE REPO CREATE ON A GROUP
@@ -82,14 +81,14 @@
                                  {'create_repo_perm': True,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
-        ug = UserGroup.get_by_group_name(users_group_name)
-        p = Permission.get_by_key('hg.create.repository')
-        p2 = Permission.get_by_key('hg.usergroup.create.false')
-        p3 = Permission.get_by_key('hg.fork.none')
+        ug = db.UserGroup.get_by_group_name(users_group_name)
+        p = db.Permission.get_by_key('hg.create.repository')
+        p2 = db.Permission.get_by_key('hg.usergroup.create.false')
+        p3 = db.Permission.get_by_key('hg.fork.none')
         # check if the user has these perms; they should be here since
         # defaults are on
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == ug).all()
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == ug).all()
 
         assert sorted([[x.users_group_id, x.permission_id, ] for x in perms]) == sorted([[ug.users_group_id, p.permission_id],
                     [ug.users_group_id, p2.permission_id],
@@ -101,33 +100,33 @@
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.follow()
-        ug = UserGroup.get_by_group_name(users_group_name)
-        p = Permission.get_by_key('hg.create.none')
-        p2 = Permission.get_by_key('hg.usergroup.create.false')
-        p3 = Permission.get_by_key('hg.fork.none')
+        ug = db.UserGroup.get_by_group_name(users_group_name)
+        p = db.Permission.get_by_key('hg.create.none')
+        p2 = db.Permission.get_by_key('hg.usergroup.create.false')
+        p3 = db.Permission.get_by_key('hg.fork.none')
 
         # check if the user has these perms; they should be here since
         # defaults are on
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == ug).all()
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == ug).all()
 
         assert sorted([[x.users_group_id, x.permission_id, ] for x in perms]) == sorted([[ug.users_group_id, p.permission_id],
                     [ug.users_group_id, p2.permission_id],
                     [ug.users_group_id, p3.permission_id]])
 
         # DELETE !
-        ug = UserGroup.get_by_group_name(users_group_name)
+        ug = db.UserGroup.get_by_group_name(users_group_name)
         ugid = ug.users_group_id
         response = self.app.post(base.url('delete_users_group', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         response = response.follow()
-        gr = Session().query(UserGroup) \
-            .filter(UserGroup.users_group_name == users_group_name).scalar()
+        gr = meta.Session().query(db.UserGroup) \
+            .filter(db.UserGroup.users_group_name == users_group_name).scalar()
 
         assert gr is None
-        p = Permission.get_by_key('hg.create.repository')
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group_id == ugid).all()
+        p = db.Permission.get_by_key('hg.create.repository')
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group_id == ugid).all()
         perms = [[x.users_group_id,
                   x.permission_id, ] for x in perms]
         assert perms == []
@@ -142,7 +141,7 @@
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         response.follow()
 
-        ug = UserGroup.get_by_group_name(users_group_name)
+        ug = db.UserGroup.get_by_group_name(users_group_name)
         self.checkSessionFlash(response,
                                'Created user group ')
         ## ENABLE REPO CREATE ON A GROUP
@@ -151,14 +150,14 @@
                                  {'fork_repo_perm': True, '_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.follow()
-        ug = UserGroup.get_by_group_name(users_group_name)
-        p = Permission.get_by_key('hg.create.none')
-        p2 = Permission.get_by_key('hg.usergroup.create.false')
-        p3 = Permission.get_by_key('hg.fork.repository')
+        ug = db.UserGroup.get_by_group_name(users_group_name)
+        p = db.Permission.get_by_key('hg.create.none')
+        p2 = db.Permission.get_by_key('hg.usergroup.create.false')
+        p3 = db.Permission.get_by_key('hg.fork.repository')
         # check if the user has these perms; they should be here since
         # defaults are on
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == ug).all()
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == ug).all()
 
         assert sorted([[x.users_group_id, x.permission_id, ] for x in perms]) == sorted([[ug.users_group_id, p.permission_id],
                     [ug.users_group_id, p2.permission_id],
@@ -169,33 +168,33 @@
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
         response.follow()
-        ug = UserGroup.get_by_group_name(users_group_name)
-        p = Permission.get_by_key('hg.create.none')
-        p2 = Permission.get_by_key('hg.usergroup.create.false')
-        p3 = Permission.get_by_key('hg.fork.none')
+        ug = db.UserGroup.get_by_group_name(users_group_name)
+        p = db.Permission.get_by_key('hg.create.none')
+        p2 = db.Permission.get_by_key('hg.usergroup.create.false')
+        p3 = db.Permission.get_by_key('hg.fork.none')
         # check if the user has these perms; they should be here since
         # defaults are on
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group == ug).all()
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group == ug).all()
 
         assert sorted([[x.users_group_id, x.permission_id, ] for x in perms]) == sorted([[ug.users_group_id, p.permission_id],
                     [ug.users_group_id, p2.permission_id],
                     [ug.users_group_id, p3.permission_id]])
 
         # DELETE !
-        ug = UserGroup.get_by_group_name(users_group_name)
+        ug = db.UserGroup.get_by_group_name(users_group_name)
         ugid = ug.users_group_id
         response = self.app.post(base.url('delete_users_group', id=ug.users_group_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         response = response.follow()
-        gr = Session().query(UserGroup) \
-                           .filter(UserGroup.users_group_name ==
+        gr = meta.Session().query(db.UserGroup) \
+                           .filter(db.UserGroup.users_group_name ==
                                    users_group_name).scalar()
 
         assert gr is None
-        p = Permission.get_by_key('hg.fork.repository')
-        perms = UserGroupToPerm.query() \
-            .filter(UserGroupToPerm.users_group_id == ugid).all()
+        p = db.Permission.get_by_key('hg.fork.repository')
+        perms = db.UserGroupToPerm.query() \
+            .filter(db.UserGroupToPerm.users_group_id == ugid).all()
         perms = [[x.users_group_id,
                   x.permission_id, ] for x in perms]
         assert perms == []
--- a/kallithea/tests/functional/test_admin_users.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_admin_users.py	Thu May 27 21:27:37 2021 +0200
@@ -19,11 +19,9 @@
 
 import kallithea
 from kallithea.controllers.admin.users import UsersController
-from kallithea.lib import helpers as h
-from kallithea.lib.auth import check_password
-from kallithea.model import validators
-from kallithea.model.db import Permission, RepoGroup, User, UserApiKeys, UserSshKeys
-from kallithea.model.meta import Session
+from kallithea.lib import webutils
+from kallithea.lib.utils2 import check_password
+from kallithea.model import db, meta, validators
 from kallithea.model.user import UserModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -40,7 +38,7 @@
     repo_group = fixture.create_repo_group(name=groupname, cur_user=username)
     yield user, repo_group
     # cleanup
-    if RepoGroup.get_by_group_name(groupname):
+    if db.RepoGroup.get_by_group_name(groupname):
         fixture.destroy_repo_group(repo_group)
 
 
@@ -49,9 +47,9 @@
 
     @classmethod
     def teardown_class(cls):
-        if User.get_by_username(cls.test_user_1):
+        if db.User.get_by_username(cls.test_user_1):
             UserModel().delete(cls.test_user_1)
-            Session().commit()
+            meta.Session().commit()
 
     def test_index(self):
         self.log_user()
@@ -86,8 +84,8 @@
         response = response.follow()
         response.mustcontain("""%s user settings""" % username) # in <title>
 
-        new_user = Session().query(User). \
-            filter(User.username == username).one()
+        new_user = meta.Session().query(db.User). \
+            filter(db.User.username == username).one()
 
         assert new_user.username == username
         assert check_password(password, new_user.password) == True
@@ -114,13 +112,13 @@
 
         with test_context(self.app):
             msg = validators.ValidUsername(False, {})._messages['system_invalid_username']
-        msg = h.html_escape(msg % {'username': 'new_user'})
+        msg = webutils.html_escape(msg % {'username': 'new_user'})
         response.mustcontain("""<span class="error-message">%s</span>""" % msg)
         response.mustcontain("""<span class="error-message">Please enter a value</span>""")
         response.mustcontain("""<span class="error-message">An email address must contain a single @</span>""")
 
         def get_user():
-            Session().query(User).filter(User.username == username).one()
+            meta.Session().query(db.User).filter(db.User.username == username).one()
 
         with pytest.raises(NoResultFound):
             get_user(), 'found user in database'
@@ -151,7 +149,7 @@
                                   extern_type='internal',
                                   extern_name=self.test_user_1,
                                   skip_if_exists=True)
-        Session().commit()
+        meta.Session().commit()
         params = usr.get_api_data(True)
         params.update({'password_confirmation': ''})
         params.update({'new_password': ''})
@@ -172,7 +170,7 @@
         self.checkSessionFlash(response, 'User updated successfully')
         params.pop('_session_csrf_secret_token')
 
-        updated_user = User.get_by_username(self.test_user_1)
+        updated_user = db.User.get_by_username(self.test_user_1)
         updated_params = updated_user.get_api_data(True)
         updated_params.update({'password_confirmation': ''})
         updated_params.update({'new_password': ''})
@@ -185,8 +183,8 @@
 
         fixture.create_user(name=username)
 
-        new_user = Session().query(User) \
-            .filter(User.username == username).one()
+        new_user = meta.Session().query(db.User) \
+            .filter(db.User.username == username).one()
         response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
@@ -200,8 +198,8 @@
         fixture.create_user(name=username)
         fixture.create_repo(name=reponame, cur_user=username)
 
-        new_user = Session().query(User) \
-            .filter(User.username == username).one()
+        new_user = meta.Session().query(db.User) \
+            .filter(db.User.username == username).one()
         response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'User &quot;%s&quot; still '
@@ -232,7 +230,7 @@
                                '%s' % (username, groupname))
 
         # Relevant _if_ the user deletion succeeded to make sure we can render groups without owner
-        # rg = RepoGroup.get_by_group_name(group_name=groupname)
+        # rg = db.RepoGroup.get_by_group_name(group_name=groupname)
         # response = self.app.get(base.url('repos_groups', id=rg.group_id))
 
         response = self.app.post(base.url('delete_repo_group', group_name=groupname),
@@ -251,8 +249,8 @@
         fixture.create_user(name=username)
         ug = fixture.create_user_group(name=groupname, cur_user=username)
 
-        new_user = Session().query(User) \
-            .filter(User.username == username).one()
+        new_user = meta.Session().query(db.User) \
+            .filter(db.User.username == username).one()
         response = self.app.post(base.url('delete_user', id=new_user.user_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'User &quot;%s&quot; still '
@@ -272,18 +270,18 @@
 
     def test_edit(self):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         response = self.app.get(base.url('edit_user', id=user.user_id))
 
     def test_add_perm_create_repo(self):
         self.log_user()
-        perm_none = Permission.get_by_key('hg.create.none')
-        perm_create = Permission.get_by_key('hg.create.repository')
+        perm_none = db.Permission.get_by_key('hg.create.none')
+        perm_create = db.Permission.get_by_key('hg.create.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
                                             email='dummy', firstname='a',
                                             lastname='b')
-        Session().commit()
+        meta.Session().commit()
         uid = user.user_id
 
         try:
@@ -295,25 +293,25 @@
                                      params=dict(create_repo_perm=True,
                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
 
-            perm_none = Permission.get_by_key('hg.create.none')
-            perm_create = Permission.get_by_key('hg.create.repository')
+            perm_none = db.Permission.get_by_key('hg.create.none')
+            perm_create = db.Permission.get_by_key('hg.create.repository')
 
             # User should have None permission on creation repository
             assert UserModel().has_perm(uid, perm_none) == False
             assert UserModel().has_perm(uid, perm_create) == True
         finally:
             UserModel().delete(uid)
-            Session().commit()
+            meta.Session().commit()
 
     def test_revoke_perm_create_repo(self):
         self.log_user()
-        perm_none = Permission.get_by_key('hg.create.none')
-        perm_create = Permission.get_by_key('hg.create.repository')
+        perm_none = db.Permission.get_by_key('hg.create.none')
+        perm_create = db.Permission.get_by_key('hg.create.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
                                             email='dummy', firstname='a',
                                             lastname='b')
-        Session().commit()
+        meta.Session().commit()
         uid = user.user_id
 
         try:
@@ -324,25 +322,25 @@
             response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
-            perm_none = Permission.get_by_key('hg.create.none')
-            perm_create = Permission.get_by_key('hg.create.repository')
+            perm_none = db.Permission.get_by_key('hg.create.none')
+            perm_create = db.Permission.get_by_key('hg.create.repository')
 
             # User should have None permission on creation repository
             assert UserModel().has_perm(uid, perm_none) == True
             assert UserModel().has_perm(uid, perm_create) == False
         finally:
             UserModel().delete(uid)
-            Session().commit()
+            meta.Session().commit()
 
     def test_add_perm_fork_repo(self):
         self.log_user()
-        perm_none = Permission.get_by_key('hg.fork.none')
-        perm_fork = Permission.get_by_key('hg.fork.repository')
+        perm_none = db.Permission.get_by_key('hg.fork.none')
+        perm_fork = db.Permission.get_by_key('hg.fork.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
                                             email='dummy', firstname='a',
                                             lastname='b')
-        Session().commit()
+        meta.Session().commit()
         uid = user.user_id
 
         try:
@@ -354,25 +352,25 @@
                                      params=dict(create_repo_perm=True,
                                                  _session_csrf_secret_token=self.session_csrf_secret_token()))
 
-            perm_none = Permission.get_by_key('hg.create.none')
-            perm_create = Permission.get_by_key('hg.create.repository')
+            perm_none = db.Permission.get_by_key('hg.create.none')
+            perm_create = db.Permission.get_by_key('hg.create.repository')
 
             # User should have None permission on creation repository
             assert UserModel().has_perm(uid, perm_none) == False
             assert UserModel().has_perm(uid, perm_create) == True
         finally:
             UserModel().delete(uid)
-            Session().commit()
+            meta.Session().commit()
 
     def test_revoke_perm_fork_repo(self):
         self.log_user()
-        perm_none = Permission.get_by_key('hg.fork.none')
-        perm_fork = Permission.get_by_key('hg.fork.repository')
+        perm_none = db.Permission.get_by_key('hg.fork.none')
+        perm_fork = db.Permission.get_by_key('hg.fork.repository')
 
         user = UserModel().create_or_update(username='dummy', password='qwe',
                                             email='dummy', firstname='a',
                                             lastname='b')
-        Session().commit()
+        meta.Session().commit()
         uid = user.user_id
 
         try:
@@ -383,19 +381,19 @@
             response = self.app.post(base.url('edit_user_perms_update', id=uid),
                                      params=dict(_session_csrf_secret_token=self.session_csrf_secret_token()))
 
-            perm_none = Permission.get_by_key('hg.create.none')
-            perm_create = Permission.get_by_key('hg.create.repository')
+            perm_none = db.Permission.get_by_key('hg.create.none')
+            perm_create = db.Permission.get_by_key('hg.create.repository')
 
             # User should have None permission on creation repository
             assert UserModel().has_perm(uid, perm_none) == True
             assert UserModel().has_perm(uid, perm_create) == False
         finally:
             UserModel().delete(uid)
-            Session().commit()
+            meta.Session().commit()
 
     def test_ips(self):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         response = self.app.get(base.url('edit_user_ips', id=user.user_id))
         response.mustcontain('All IP addresses are allowed')
 
@@ -409,7 +407,7 @@
     ])
     def test_add_ip(self, test_name, ip, ip_range, failure, auto_clear_ip_permissions):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
         response = self.app.post(base.url('edit_user_ips_update', id=user_id),
@@ -428,13 +426,13 @@
 
     def test_delete_ip(self, auto_clear_ip_permissions):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
         ip = '127.0.0.1/32'
         ip_range = '127.0.0.1 - 127.0.0.1'
         with test_context(self.app):
             new_ip = UserModel().add_extra_ip(user_id, ip)
-            Session().commit()
+            meta.Session().commit()
         new_ip_id = new_ip.ip_id
 
         response = self.app.get(base.url('edit_user_ips', id=user_id))
@@ -452,7 +450,7 @@
     def test_api_keys(self):
         self.log_user()
 
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         response = self.app.get(base.url('edit_user_api_keys', id=user.user_id))
         response.mustcontain(user.api_key)
         response.mustcontain('Expires: Never')
@@ -464,7 +462,7 @@
     ])
     def test_add_api_keys(self, desc, lifetime):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
         response = self.app.post(base.url('edit_user_api_keys_update', id=user_id),
@@ -472,17 +470,17 @@
         self.checkSessionFlash(response, 'API key successfully created')
         try:
             response = response.follow()
-            user = User.get(user_id)
+            user = db.User.get(user_id)
             for api_key in user.api_keys:
                 response.mustcontain(api_key)
         finally:
-            for api_key in UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all():
-                Session().delete(api_key)
-                Session().commit()
+            for api_key in db.UserApiKeys.query().filter(db.UserApiKeys.user_id == user_id).all():
+                meta.Session().delete(api_key)
+                meta.Session().commit()
 
     def test_remove_api_key(self):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
         response = self.app.post(base.url('edit_user_api_keys_update', id=user_id),
@@ -491,18 +489,18 @@
         response = response.follow()
 
         # now delete our key
-        keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
+        keys = db.UserApiKeys.query().filter(db.UserApiKeys.user_id == user_id).all()
         assert 1 == len(keys)
 
         response = self.app.post(base.url('edit_user_api_keys_delete', id=user_id),
                  {'del_api_key': keys[0].api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully deleted')
-        keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
+        keys = db.UserApiKeys.query().filter(db.UserApiKeys.user_id == user_id).all()
         assert 0 == len(keys)
 
     def test_reset_main_api_key(self):
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
         api_key = user.api_key
         response = self.app.get(base.url('edit_user_api_keys', id=user_id))
@@ -521,7 +519,7 @@
         fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
         response = self.app.post(base.url('edit_user_ssh_keys', id=user_id),
@@ -532,11 +530,11 @@
 
         response = response.follow()
         response.mustcontain(fingerprint)
-        ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
+        ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.user_id == user_id).one()
         assert ssh_key.fingerprint == fingerprint
         assert ssh_key.description == description
-        Session().delete(ssh_key)
-        Session().commit()
+        meta.Session().delete(ssh_key)
+        meta.Session().commit()
 
     def test_remove_ssh_key(self):
         description = ''
@@ -544,7 +542,7 @@
         fingerprint = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
 
         self.log_user()
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         user_id = user.user_id
 
         response = self.app.post(base.url('edit_user_ssh_keys', id=user_id),
@@ -553,14 +551,14 @@
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key %s successfully added' % fingerprint)
         response.follow()
-        ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
+        ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.user_id == user_id).one()
         assert ssh_key.description == 'me@localhost'
 
         response = self.app.post(base.url('edit_user_ssh_keys_delete', id=user_id),
                                  {'del_public_key_fingerprint': ssh_key.fingerprint,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key successfully deleted')
-        keys = UserSshKeys.query().all()
+        keys = db.UserSshKeys.query().all()
         assert 0 == len(keys)
 
 
@@ -571,11 +569,11 @@
         # flash complains about an non-existing session
         def flash_mock(*args, **kwargs):
             pass
-        monkeypatch.setattr(h, 'flash', flash_mock)
+        monkeypatch.setattr(webutils, 'flash', flash_mock)
 
         u = UsersController()
         # a regular user should work correctly
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         assert u._get_user_or_raise_if_default(user.user_id) == user
         # the default user should raise
         with pytest.raises(HTTPNotFound):
@@ -589,59 +587,59 @@
     """
     def test_edit_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user', id=user.user_id), status=404)
 
     def test_edit_advanced_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user_advanced', id=user.user_id), status=404)
 
     # API keys
     def test_edit_api_keys_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user_api_keys', id=user.user_id), status=404)
 
     def test_add_api_keys_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.post(base.url('edit_user_api_keys_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     def test_delete_api_keys_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.post(base.url('edit_user_api_keys_delete', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     # Permissions
     def test_edit_perms_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user_perms', id=user.user_id), status=404)
 
     def test_update_perms_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.post(base.url('edit_user_perms_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     # Emails
     def test_edit_emails_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user_emails', id=user.user_id), status=404)
 
     def test_add_emails_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.post(base.url('edit_user_emails_update', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
     def test_delete_emails_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.post(base.url('edit_user_emails_delete', id=user.user_id),
                  {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=404)
 
@@ -650,5 +648,5 @@
     # the global IP whitelist and thus allowed. Only 'edit' is forbidden.
     def test_edit_ip_default_user(self):
         self.log_user()
-        user = User.get_default_user()
+        user = db.User.get_default_user()
         response = self.app.get(base.url('edit_user_ips', id=user.user_id), status=404)
--- a/kallithea/tests/functional/test_changeset_pullrequests_comments.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_changeset_pullrequests_comments.py	Thu May 27 21:27:37 2021 +0200
@@ -1,17 +1,16 @@
 import re
 
+from kallithea.model import db, meta
 from kallithea.model.changeset_status import ChangesetStatusModel
-from kallithea.model.db import ChangesetComment, PullRequest
-from kallithea.model.meta import Session
 from kallithea.tests import base
 
 
 class TestChangeSetCommentsController(base.TestController):
 
     def setup_method(self, method):
-        for x in ChangesetComment.query().all():
-            Session().delete(x)
-        Session().commit()
+        for x in db.ChangesetComment.query().all():
+            meta.Session().delete(x)
+        meta.Session().commit()
 
     def test_create(self):
         self.log_user()
@@ -34,7 +33,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 1
+        assert db.ChangesetComment.query().count() == 1
 
     def test_create_inline(self):
         self.log_user()
@@ -64,7 +63,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 1
+        assert db.ChangesetComment.query().count() == 1
 
     def test_create_with_mention(self):
         self.log_user()
@@ -88,7 +87,7 @@
         response.mustcontain('<b>@%s</b> check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN)
 
         # test DB
-        assert ChangesetComment.query().count() == 1
+        assert db.ChangesetComment.query().count() == 1
 
     def test_create_status_change(self):
         self.log_user()
@@ -112,7 +111,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 1
+        assert db.ChangesetComment.query().count() == 1
 
         # check status
         status = ChangesetStatusModel().get_status(repo=base.HG_REPO, revision=rev)
@@ -128,7 +127,7 @@
                                      repo_name=base.HG_REPO, revision=rev),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
-        comments = ChangesetComment.query().all()
+        comments = db.ChangesetComment.query().all()
         assert len(comments) == 1
         comment_id = comments[0].comment_id
 
@@ -137,7 +136,7 @@
                                     comment_id=comment_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
-        comments = ChangesetComment.query().all()
+        comments = db.ChangesetComment.query().all()
         assert len(comments) == 0
 
         response = self.app.get(base.url(controller='changeset', action='index',
@@ -152,9 +151,9 @@
 class TestPullrequestsCommentsController(base.TestController):
 
     def setup_method(self, method):
-        for x in ChangesetComment.query().all():
-            Session().delete(x)
-        Session().commit()
+        for x in db.ChangesetComment.query().all():
+            meta.Session().delete(x)
+        meta.Session().commit()
 
     def _create_pr(self):
         response = self.app.post(base.url(controller='pullrequests', action='create',
@@ -195,7 +194,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 2
+        assert db.ChangesetComment.query().count() == 2
 
     def test_create_inline(self):
         self.log_user()
@@ -225,7 +224,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 2
+        assert db.ChangesetComment.query().count() == 2
 
     def test_create_with_mention(self):
         self.log_user()
@@ -248,7 +247,7 @@
         response.mustcontain('<b>@%s</b> check CommentOnRevision' % base.TEST_USER_REGULAR_LOGIN)
 
         # test DB
-        assert ChangesetComment.query().count() == 2
+        assert db.ChangesetComment.query().count() == 2
 
     def test_create_status_change(self):
         self.log_user()
@@ -275,7 +274,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert ChangesetComment.query().count() == 2
+        assert db.ChangesetComment.query().count() == 2
 
         # check status
         status = ChangesetStatusModel().get_status(repo=base.HG_REPO, pull_request=pr_id)
@@ -291,7 +290,7 @@
                                      repo_name=base.HG_REPO, pull_request_id=pr_id),
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
 
-        comments = ChangesetComment.query().all()
+        comments = db.ChangesetComment.query().all()
         assert len(comments) == 2
         comment_id = comments[-1].comment_id
 
@@ -300,7 +299,7 @@
                                     comment_id=comment_id),
             params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
-        comments = ChangesetComment.query().all()
+        comments = db.ChangesetComment.query().all()
         assert len(comments) == 1
 
         response = self.app.get(base.url(controller='pullrequests', action='show',
@@ -332,7 +331,7 @@
         response.mustcontain(text)
 
         # test DB
-        assert PullRequest.get(pr_id).status == PullRequest.STATUS_CLOSED
+        assert db.PullRequest.get(pr_id).status == db.PullRequest.STATUS_CLOSED
 
     def test_delete_pr(self):
         self.log_user()
@@ -351,7 +350,7 @@
                                 repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''), status=404)
 
         # test DB
-        assert PullRequest.get(pr_id) is None
+        assert db.PullRequest.get(pr_id) is None
 
     def test_delete_closed_pr(self):
         self.log_user()
@@ -374,4 +373,4 @@
                                      params=params, extra_environ={'HTTP_X_PARTIAL_XHR': '1'}, status=403)
 
         # verify that PR still exists, in closed state
-        assert PullRequest.get(pr_id).status == PullRequest.STATUS_CLOSED
+        assert db.PullRequest.get(pr_id).status == db.PullRequest.STATUS_CLOSED
--- a/kallithea/tests/functional/test_compare.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_compare.py	Thu May 27 21:27:37 2021 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-from kallithea.model.meta import Session
+from kallithea.model import meta
 from kallithea.model.repo import RepoModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -23,8 +23,8 @@
             RepoModel().delete(self.r2_id)
         if self.r1_id:
             RepoModel().delete(self.r1_id)
-        Session().commit()
-        Session.remove()
+        meta.Session().commit()
+        meta.Session.remove()
 
     def test_compare_forks_on_branch_extra_commits_hg(self):
         self.log_user()
@@ -457,12 +457,12 @@
 
         cs0 = fixture.commit_change(repo=r1_name, filename='file1',
                 content='line1', message='commit1', vcs_type='hg', newfile=True)
-        Session().commit()
+        meta.Session().commit()
         assert repo1.scm_instance.revisions == [cs0.raw_id]
         # fork the repo1
         repo2 = fixture.create_fork(r1_name, 'one-fork',
                                     cur_user=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
         assert repo2.scm_instance.revisions == [cs0.raw_id]
         self.r2_id = repo2.repo_id
         r2_name = repo2.repo_name
@@ -530,12 +530,12 @@
         cs0 = fixture.commit_change(repo=r1_name, filename='file1',
                 content='line1', message='commit1', vcs_type='git',
                 newfile=True)
-        Session().commit()
+        meta.Session().commit()
         assert repo1.scm_instance.revisions == [cs0.raw_id]
         # fork the repo1
         repo2 = fixture.create_fork(r1_name, 'one-git-fork',
                                     cur_user=base.TEST_USER_ADMIN_LOGIN)
-        Session().commit()
+        meta.Session().commit()
         assert repo2.scm_instance.revisions == [cs0.raw_id]
         self.r2_id = repo2.repo_id
         r2_name = repo2.repo_name
--- a/kallithea/tests/functional/test_files.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_files.py	Thu May 27 21:27:37 2021 +0200
@@ -3,8 +3,8 @@
 import mimetypes
 import posixpath
 
-from kallithea.model.db import Repository
-from kallithea.model.meta import Session
+from kallithea.lib import webutils
+from kallithea.model import db, meta
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
@@ -22,9 +22,9 @@
 
 
 def _set_downloads(repo_name, set_to):
-    repo = Repository.get_by_repo_name(repo_name)
+    repo = db.Repository.get_by_repo_name(repo_name)
     repo.enable_downloads = set_to
-    Session().commit()
+    meta.Session().commit()
 
 
 class TestFilesController(base.TestController):
@@ -96,8 +96,7 @@
     def test_file_source(self):
         # Force the global cache to be populated now when we know the right .ini has been loaded.
         # (Without this, the test would fail.)
-        import kallithea.lib.helpers
-        kallithea.lib.helpers._urlify_issues_f = None
+        webutils._urlify_issues_f = None
         self.log_user()
         response = self.app.get(base.url(controller='files', action='index',
                                     repo_name=base.HG_REPO,
--- a/kallithea/tests/functional/test_forks.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_forks.py	Thu May 27 21:27:37 2021 +0200
@@ -2,8 +2,7 @@
 
 import urllib.parse
 
-from kallithea.model.db import Repository, User
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.user import UserModel
 from kallithea.tests import base
@@ -27,11 +26,11 @@
         self.password = 'qweqwe'
         u1 = fixture.create_user(self.username, password=self.password, email='fork_king@example.com')
         self.u1_id = u1.user_id
-        Session().commit()
+        meta.Session().commit()
 
     def teardown_method(self, method):
         fixture.destroy_user(self.u1_id)
-        Session().commit()
+        meta.Session().commit()
 
     def test_index(self):
         self.log_user()
@@ -45,19 +44,19 @@
         self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)['user_id']
         try:
             user_model = UserModel()
-            usr = User.get_default_user()
+            usr = db.User.get_default_user()
             user_model.revoke_perm(usr, 'hg.fork.repository')
             user_model.grant_perm(usr, 'hg.fork.none')
-            Session().commit()
+            meta.Session().commit()
             # try create a fork
             repo_name = self.REPO
             self.app.post(base.url(controller='forks', action='fork_create',
                               repo_name=repo_name), {'_session_csrf_secret_token': self.session_csrf_secret_token()}, status=403)
         finally:
-            usr = User.get_default_user()
+            usr = db.User.get_default_user()
             user_model.revoke_perm(usr, 'hg.fork.none')
             user_model.grant_perm(usr, 'hg.fork.repository')
-            Session().commit()
+            meta.Session().commit()
 
     def test_index_with_fork(self):
         self.log_user()
@@ -66,7 +65,7 @@
         fork_name = self.REPO_FORK
         description = 'fork of vcs test'
         repo_name = self.REPO
-        org_repo = Repository.get_by_repo_name(repo_name)
+        org_repo = db.Repository.get_by_repo_name(repo_name)
         creation_args = {
             'repo_name': fork_name,
             'repo_group': '-1',
@@ -99,7 +98,7 @@
         fork_name_full = 'vc/%s' % fork_name
         description = 'fork of vcs test'
         repo_name = self.REPO
-        org_repo = Repository.get_by_repo_name(repo_name)
+        org_repo = db.Repository.get_by_repo_name(repo_name)
         creation_args = {
             'repo_name': fork_name,
             'repo_group': group_id,
@@ -111,7 +110,7 @@
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
         self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
-        repo = Repository.get_by_repo_name(fork_name_full)
+        repo = db.Repository.get_by_repo_name(fork_name_full)
         assert repo.fork.repo_name == self.REPO
 
         ## run the check page that triggers the flash message
@@ -122,8 +121,8 @@
                 % (repo_name, fork_name_full, fork_name_full))
 
         # test if the fork was created in the database
-        fork_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == fork_name_full).one()
+        fork_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == fork_name_full).one()
 
         assert fork_repo.repo_name == fork_name_full
         assert fork_repo.fork.repo_name == repo_name
@@ -142,7 +141,7 @@
 
         # create a fork
         repo_name = self.REPO
-        org_repo = Repository.get_by_repo_name(repo_name)
+        org_repo = db.Repository.get_by_repo_name(repo_name)
         fork_name = self.REPO_FORK + '-rødgrød'
         creation_args = {
             'repo_name': fork_name,
@@ -160,7 +159,7 @@
         response.mustcontain(
             """<a href="/%s">%s</a>""" % (urllib.parse.quote(fork_name), fork_name)
         )
-        fork_repo = Repository.get_by_repo_name(fork_name)
+        fork_repo = db.Repository.get_by_repo_name(fork_name)
         assert fork_repo
 
         # fork the fork
@@ -193,7 +192,7 @@
         fork_name = self.REPO_FORK
         description = 'fork of vcs test'
         repo_name = self.REPO
-        org_repo = Repository.get_by_repo_name(repo_name)
+        org_repo = db.Repository.get_by_repo_name(repo_name)
         creation_args = {
             'repo_name': fork_name,
             'repo_group': '-1',
@@ -205,7 +204,7 @@
             '_session_csrf_secret_token': self.session_csrf_secret_token()}
         self.app.post(base.url(controller='forks', action='fork_create',
                           repo_name=repo_name), creation_args)
-        repo = Repository.get_by_repo_name(self.REPO_FORK)
+        repo = db.Repository.get_by_repo_name(self.REPO_FORK)
         assert repo.fork.repo_name == self.REPO
 
         ## run the check page that triggers the flash message
@@ -216,8 +215,8 @@
                 % (repo_name, fork_name, fork_name))
 
         # test if the fork was created in the database
-        fork_repo = Session().query(Repository) \
-            .filter(Repository.repo_name == fork_name).one()
+        fork_repo = meta.Session().query(db.Repository) \
+            .filter(db.Repository.repo_name == fork_name).one()
 
         assert fork_repo.repo_name == fork_name
         assert fork_repo.fork.repo_name == repo_name
@@ -230,16 +229,16 @@
 
         usr = self.log_user(self.username, self.password)['user_id']
 
-        forks = Repository.query() \
-            .filter(Repository.repo_type == self.REPO_TYPE) \
-            .filter(Repository.fork_id != None).all()
+        forks = db.Repository.query() \
+            .filter(db.Repository.repo_type == self.REPO_TYPE) \
+            .filter(db.Repository.fork_id != None).all()
         assert 1 == len(forks)
 
         # set read permissions for this
         RepoModel().grant_user_permission(repo=forks[0],
                                           user=usr,
                                           perm='repository.read')
-        Session().commit()
+        meta.Session().commit()
 
         response = self.app.get(base.url(controller='forks', action='forks',
                                     repo_name=repo_name))
@@ -247,13 +246,13 @@
         response.mustcontain('<div>fork of vcs test</div>')
 
         # remove permissions
-        default_user = User.get_default_user()
+        default_user = db.User.get_default_user()
         try:
             RepoModel().grant_user_permission(repo=forks[0],
                                               user=usr, perm='repository.none')
             RepoModel().grant_user_permission(repo=forks[0],
                                               user=default_user, perm='repository.none')
-            Session().commit()
+            meta.Session().commit()
 
             # fork shouldn't be visible
             response = self.app.get(base.url(controller='forks', action='forks',
--- a/kallithea/tests/functional/test_home.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_home.py	Thu May 27 21:27:37 2021 +0200
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import json
 
-from kallithea.model.meta import Session
+from kallithea.model import meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests import base
@@ -62,7 +62,7 @@
         finally:
             RepoModel().delete('gr1/repo_in_group')
             RepoGroupModel().delete(repo_group='gr1', force_delete=True)
-            Session().commit()
+            meta.Session().commit()
 
     def test_users_and_groups_data(self):
         fixture.create_user('evil', firstname='D\'o\'ct"o"r', lastname='Évíl')
--- a/kallithea/tests/functional/test_journal.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_journal.py	Thu May 27 21:27:37 2021 +0200
@@ -13,12 +13,12 @@
 
     def test_stop_following_repository(self):
         session = self.log_user()
-#        usr = Session().query(User).filter(User.username == TEST_USER_ADMIN_LOGIN).one()
-#        repo = Session().query(Repository).filter(Repository.repo_name == HG_REPO).one()
+#        usr = Session().query(User).filter(db.User.username == TEST_USER_ADMIN_LOGIN).one()
+#        repo = Session().query(db.Repository).filter(db.Repository.repo_name == HG_REPO).one()
 #
-#        followings = Session().query(UserFollowing) \
-#            .filter(UserFollowing.user == usr) \
-#            .filter(UserFollowing.follows_repository == repo).all()
+#        followings = Session().query(db.UserFollowing) \
+#            .filter(db.UserFollowing.user == usr) \
+#            .filter(db.UserFollowing.follows_repository == repo).all()
 #
 #        assert len(followings) == 1, 'Not following any repository'
 #
--- a/kallithea/tests/functional/test_login.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_login.py	Thu May 27 21:27:37 2021 +0200
@@ -6,14 +6,11 @@
 import mock
 from tg.util.webtest import test_context
 
-import kallithea.lib.celerylib.tasks
-from kallithea.lib import helpers as h
-from kallithea.lib.auth import check_password
-from kallithea.lib.utils2 import generate_api_key
-from kallithea.model import validators
+import kallithea.model.notification
+from kallithea.lib import webutils
+from kallithea.lib.utils2 import check_password, generate_api_key
+from kallithea.model import db, meta, validators
 from kallithea.model.api_key import ApiKeyModel
-from kallithea.model.db import User
-from kallithea.model.meta import Session
 from kallithea.model.user import UserModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -240,7 +237,7 @@
 
         with test_context(self.app):
             msg = validators.ValidUsername()._messages['username_exists']
-        msg = h.html_escape(msg % {'username': uname})
+        msg = webutils.html_escape(msg % {'username': uname})
         response.mustcontain(msg)
 
     def test_register_err_same_email(self):
@@ -313,7 +310,7 @@
         response.mustcontain('An email address must contain a single @')
         with test_context(self.app):
             msg = validators.ValidUsername()._messages['username_exists']
-        msg = h.html_escape(msg % {'username': usr})
+        msg = webutils.html_escape(msg % {'username': usr})
         response.mustcontain(msg)
 
     def test_register_special_chars(self):
@@ -362,7 +359,7 @@
         assert response.status == '302 Found'
         self.checkSessionFlash(response, 'You have successfully registered with Kallithea')
 
-        ret = Session().query(User).filter(User.username == 'test_regular4').one()
+        ret = meta.Session().query(db.User).filter(db.User.username == 'test_regular4').one()
         assert ret.username == username
         assert check_password(password, ret.password) == True
         assert ret.email == email
@@ -396,24 +393,24 @@
         lastname = 'reset'
         timestamp = int(time.time())
 
-        new = User()
+        new = db.User()
         new.username = username
         new.password = password
         new.email = email
         new.name = name
         new.lastname = lastname
         new.api_key = generate_api_key()
-        Session().add(new)
-        Session().commit()
+        meta.Session().add(new)
+        meta.Session().commit()
 
         token = UserModel().get_reset_password_token(
-            User.get_by_username(username), timestamp, self.session_csrf_secret_token())
+            db.User.get_by_username(username), timestamp, self.session_csrf_secret_token())
 
         collected = []
         def mock_send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
             collected.append((recipients, subject, body, html_body))
 
-        with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', mock_send_email), \
+        with mock.patch.object(kallithea.model.notification, 'send_email', mock_send_email), \
                 mock.patch.object(time, 'time', lambda: timestamp):
             response = self.app.post(base.url(controller='login',
                                          action='password_reset'),
@@ -496,7 +493,7 @@
                 headers = {}
             else:
                 if api_key is True:
-                    api_key = User.get_first_admin().api_key
+                    api_key = db.User.get_first_admin().api_key
                 params = {'api_key': api_key}
                 headers = {'Authorization': 'Bearer ' + str(api_key)}
 
@@ -522,13 +519,13 @@
 
     def test_access_page_via_extra_api_key(self):
         new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test')
-        Session().commit()
+        meta.Session().commit()
         self._api_key_test(new_api_key.api_key, status=200)
 
     def test_access_page_via_expired_api_key(self):
         new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test')
-        Session().commit()
+        meta.Session().commit()
         # patch the API key and make it expired
         new_api_key.expires = 0
-        Session().commit()
+        meta.Session().commit()
         self._api_key_test(new_api_key.api_key, status=403)
--- a/kallithea/tests/functional/test_my_account.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_my_account.py	Thu May 27 21:27:37 2021 +0200
@@ -2,9 +2,8 @@
 
 from tg.util.webtest import test_context
 
-from kallithea.lib import helpers as h
-from kallithea.model.db import Repository, User, UserApiKeys, UserFollowing, UserSshKeys
-from kallithea.model.meta import Session
+from kallithea.lib import webutils
+from kallithea.model import db, meta, validators
 from kallithea.model.user import UserModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -18,9 +17,9 @@
 
     @classmethod
     def teardown_class(cls):
-        if User.get_by_username(cls.test_user_1):
+        if db.User.get_by_username(cls.test_user_1):
             UserModel().delete(cls.test_user_1)
-            Session().commit()
+            meta.Session().commit()
 
     def test_my_account(self):
         self.log_user()
@@ -31,8 +30,8 @@
     def test_my_account_my_repos(self):
         self.log_user()
         response = self.app.get(base.url('my_account_repos'))
-        cnt = Repository.query().filter(Repository.owner ==
-                           User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
+        cnt = db.Repository.query().filter(db.Repository.owner ==
+                           db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
         response.mustcontain('"raw_name": "%s"' % base.HG_REPO)
         response.mustcontain('"just_name": "%s"' % base.GIT_REPO)
 
@@ -40,8 +39,8 @@
         self.log_user()
         response = self.app.get(base.url('my_account_watched'))
 
-        cnt = UserFollowing.query().filter(UserFollowing.user ==
-                            User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
+        cnt = db.UserFollowing.query().filter(db.UserFollowing.user ==
+                            db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)).count()
         response.mustcontain('"raw_name": "%s"' % base.HG_REPO)
         response.mustcontain('"just_name": "%s"' % base.GIT_REPO)
 
@@ -76,10 +75,9 @@
 
         response = self.app.get(base.url('my_account_emails'))
 
-        from kallithea.model.db import UserEmailMap
-        email_id = UserEmailMap.query() \
-            .filter(UserEmailMap.user == User.get_by_username(base.TEST_USER_ADMIN_LOGIN)) \
-            .filter(UserEmailMap.email == 'barz@example.com').one().email_id
+        email_id = db.UserEmailMap.query() \
+            .filter(db.UserEmailMap.user == db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)) \
+            .filter(db.UserEmailMap.email == 'barz@example.com').one().email_id
 
         response.mustcontain('barz@example.com')
         response.mustcontain('<input id="del_email_id" name="del_email_id" type="hidden" value="%s" />' % email_id)
@@ -128,7 +126,7 @@
         self.checkSessionFlash(response,
                                'Your account was updated successfully')
 
-        updated_user = User.get_by_username(self.test_user_1)
+        updated_user = db.User.get_by_username(self.test_user_1)
         updated_params = updated_user.get_api_data(True)
         updated_params.update({'password_confirmation': ''})
         updated_params.update({'new_password': ''})
@@ -184,16 +182,15 @@
                                             _session_csrf_secret_token=self.session_csrf_secret_token()))
 
         response.mustcontain('An email address must contain a single @')
-        from kallithea.model import validators
         with test_context(self.app):
             msg = validators.ValidUsername(edit=False, old_data={}) \
                     ._messages['username_exists']
-        msg = h.html_escape(msg % {'username': base.TEST_USER_ADMIN_LOGIN})
+        msg = webutils.html_escape(msg % {'username': base.TEST_USER_ADMIN_LOGIN})
         response.mustcontain(msg)
 
     def test_my_account_api_keys(self):
         usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
-        user = User.get(usr['user_id'])
+        user = db.User.get(usr['user_id'])
         response = self.app.get(base.url('my_account_api_keys'))
         response.mustcontain(user.api_key)
         response.mustcontain('Expires: Never')
@@ -205,41 +202,41 @@
     ])
     def test_my_account_add_api_keys(self, desc, lifetime):
         usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
-        user = User.get(usr['user_id'])
+        user = db.User.get(usr['user_id'])
         response = self.app.post(base.url('my_account_api_keys'),
                                  {'description': desc, 'lifetime': lifetime, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         try:
             response = response.follow()
-            user = User.get(usr['user_id'])
+            user = db.User.get(usr['user_id'])
             for api_key in user.api_keys:
                 response.mustcontain(api_key)
         finally:
-            for api_key in UserApiKeys.query().all():
-                Session().delete(api_key)
-                Session().commit()
+            for api_key in db.UserApiKeys.query().all():
+                meta.Session().delete(api_key)
+                meta.Session().commit()
 
     def test_my_account_remove_api_key(self):
         usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
-        user = User.get(usr['user_id'])
+        user = db.User.get(usr['user_id'])
         response = self.app.post(base.url('my_account_api_keys'),
                                  {'description': 'desc', 'lifetime': -1, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully created')
         response = response.follow()
 
         # now delete our key
-        keys = UserApiKeys.query().all()
+        keys = db.UserApiKeys.query().all()
         assert 1 == len(keys)
 
         response = self.app.post(base.url('my_account_api_keys_delete'),
                  {'del_api_key': keys[0].api_key, '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'API key successfully deleted')
-        keys = UserApiKeys.query().all()
+        keys = db.UserApiKeys.query().all()
         assert 0 == len(keys)
 
     def test_my_account_reset_main_api_key(self):
         usr = self.log_user(base.TEST_USER_REGULAR2_LOGIN, base.TEST_USER_REGULAR2_PASS)
-        user = User.get(usr['user_id'])
+        user = db.User.get(usr['user_id'])
         api_key = user.api_key
         response = self.app.get(base.url('my_account_api_keys'))
         response.mustcontain(api_key)
@@ -266,11 +263,11 @@
         response = response.follow()
         response.mustcontain(fingerprint)
         user_id = response.session['authuser']['user_id']
-        ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
+        ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.user_id == user_id).one()
         assert ssh_key.fingerprint == fingerprint
         assert ssh_key.description == description
-        Session().delete(ssh_key)
-        Session().commit()
+        meta.Session().delete(ssh_key)
+        meta.Session().commit()
 
     def test_my_account_remove_ssh_key(self):
         description = ''
@@ -285,12 +282,12 @@
         self.checkSessionFlash(response, 'SSH key %s successfully added' % fingerprint)
         response.follow()
         user_id = response.session['authuser']['user_id']
-        ssh_key = UserSshKeys.query().filter(UserSshKeys.user_id == user_id).one()
+        ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.user_id == user_id).one()
         assert ssh_key.description == 'me@localhost'
 
         response = self.app.post(base.url('my_account_ssh_keys_delete'),
                                  {'del_public_key_fingerprint': ssh_key.fingerprint,
                                   '_session_csrf_secret_token': self.session_csrf_secret_token()})
         self.checkSessionFlash(response, 'SSH key successfully deleted')
-        keys = UserSshKeys.query().all()
+        keys = db.UserSshKeys.query().all()
         assert 0 == len(keys)
--- a/kallithea/tests/functional/test_pullrequests.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_pullrequests.py	Thu May 27 21:27:37 2021 +0200
@@ -3,8 +3,7 @@
 import pytest
 
 from kallithea.controllers.pullrequests import PullrequestsController
-from kallithea.model.db import PullRequest, User
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
@@ -91,9 +90,9 @@
 
     def test_update_reviewers(self):
         self.log_user()
-        regular_user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
-        regular_user2 = User.get_by_username(base.TEST_USER_REGULAR2_LOGIN)
-        admin_user = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        regular_user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        regular_user2 = db.User.get_by_username(base.TEST_USER_REGULAR2_LOGIN)
+        admin_user = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
         # create initial PR
         response = self.app.post(base.url(controller='pullrequests', action='create',
@@ -251,12 +250,12 @@
             },
             status=302)
         pr1_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
-        pr1 = PullRequest.get(pr1_id)
+        pr1 = db.PullRequest.get(pr1_id)
 
         assert pr1.org_ref == 'branch:webvcs:9e6119747791ff886a5abe1193a730b6bf874e1c'
         assert pr1.other_ref == 'branch:default:948da46b29c125838a717f6a8496eb409717078d'
 
-        Session().rollback() # invalidate loaded PR objects before issuing next request.
+        meta.Session().rollback() # invalidate loaded PR objects before issuing next request.
 
         # create PR 2 (new iteration with same ancestor)
         response = self.app.post(
@@ -270,15 +269,15 @@
              },
              status=302)
         pr2_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
-        pr1 = PullRequest.get(pr1_id)
-        pr2 = PullRequest.get(pr2_id)
+        pr1 = db.PullRequest.get(pr1_id)
+        pr2 = db.PullRequest.get(pr2_id)
 
         assert pr2_id != pr1_id
-        assert pr1.status == PullRequest.STATUS_CLOSED
+        assert pr1.status == db.PullRequest.STATUS_CLOSED
         assert pr2.org_ref == 'branch:webvcs:5ec21f21aafe95220f1fc4843a4a57c378498b71'
         assert pr2.other_ref == pr1.other_ref
 
-        Session().rollback() # invalidate loaded PR objects before issuing next request.
+        meta.Session().rollback() # invalidate loaded PR objects before issuing next request.
 
         # create PR 3 (new iteration with new ancestor)
         response = self.app.post(
@@ -292,11 +291,11 @@
              },
              status=302)
         pr3_id = int(re.search(r'/pull-request/(\d+)/', response.location).group(1))
-        pr2 = PullRequest.get(pr2_id)
-        pr3 = PullRequest.get(pr3_id)
+        pr2 = db.PullRequest.get(pr2_id)
+        pr3 = db.PullRequest.get(pr3_id)
 
         assert pr3_id != pr2_id
-        assert pr2.status == PullRequest.STATUS_CLOSED
+        assert pr2.status == db.PullRequest.STATUS_CLOSED
         assert pr3.org_ref == 'branch:webvcs:fb95b340e0d03fa51f33c56c991c08077c99303e'
         assert pr3.other_ref == 'branch:default:41d2568309a05f422cffb8008e599d385f8af439'
 
@@ -308,13 +307,13 @@
         self.repo_name = 'main'
         repo = fixture.create_repo(self.repo_name, repo_type='hg')
         self.repo_scm_instance = repo.scm_instance
-        Session().commit()
+        meta.Session().commit()
         self.c = PullrequestsController()
 
     def teardown_method(self, method):
         fixture.destroy_repo('main')
-        Session().commit()
-        Session.remove()
+        meta.Session().commit()
+        meta.Session.remove()
 
     def test_repo_refs_empty_repo(self):
         # empty repo with no commits, no branches, no bookmarks, just one tag
--- a/kallithea/tests/functional/test_pullrequests_git.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_pullrequests_git.py	Thu May 27 21:27:37 2021 +0200
@@ -3,7 +3,7 @@
 import pytest
 
 from kallithea.controllers.pullrequests import PullrequestsController
-from kallithea.model.meta import Session
+from kallithea.model import meta
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
 
@@ -92,13 +92,13 @@
         self.repo_name = 'main'
         repo = fixture.create_repo(self.repo_name, repo_type='git')
         self.repo_scm_instance = repo.scm_instance
-        Session().commit()
+        meta.Session().commit()
         self.c = PullrequestsController()
 
     def teardown_method(self, method):
         fixture.destroy_repo('main')
-        Session().commit()
-        Session.remove()
+        meta.Session().commit()
+        meta.Session.remove()
 
     def test_repo_refs_empty_repo(self):
         # empty repo with no commits, no branches, no bookmarks, just one tag
--- a/kallithea/tests/functional/test_search_indexing.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_search_indexing.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,8 @@
 import mock
 
-from kallithea import CONFIG
-from kallithea.config.conf import INDEX_FILENAMES
-from kallithea.model.meta import Session
+import kallithea
+from kallithea.lib.conf import INDEX_FILENAMES
+from kallithea.model import meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests import base
@@ -66,7 +66,7 @@
         # (FYI, ENOMEM occurs at forking "git" with python 2.7.3,
         # Linux 3.2.78-1 x86_64, 3GB memory, and no ulimit
         # configuration for memory)
-        create_test_index(base.TESTS_TMP_PATH, CONFIG, full_index=full_index)
+        create_test_index(base.TESTS_TMP_PATH, kallithea.CONFIG, full_index=full_index)
 
 
 class TestSearchControllerIndexing(base.TestController):
@@ -103,8 +103,8 @@
                 RepoGroupModel().delete(groupids.pop(groupname),
                                         force_delete=True)
 
-        Session().commit()
-        Session.remove()
+        meta.Session().commit()
+        meta.Session.remove()
 
         rebuild_index(full_index=True) # rebuild fully for subsequent tests
 
--- a/kallithea/tests/functional/test_summary.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/functional/test_summary.py	Thu May 27 21:27:37 2021 +0200
@@ -14,8 +14,7 @@
 
 import pytest
 
-from kallithea.model.db import Repository
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
 from kallithea.tests import base
@@ -36,7 +35,7 @@
 
     def test_index_hg(self, custom_settings):
         self.log_user()
-        ID = Repository.get_by_repo_name(base.HG_REPO).repo_id
+        ID = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
         response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name=base.HG_REPO))
@@ -66,7 +65,7 @@
 
     def test_index_git(self, custom_settings):
         self.log_user()
-        ID = Repository.get_by_repo_name(base.GIT_REPO).repo_id
+        ID = db.Repository.get_by_repo_name(base.GIT_REPO).repo_id
         response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name=base.GIT_REPO))
@@ -95,7 +94,7 @@
 
     def test_index_by_id_hg(self):
         self.log_user()
-        ID = Repository.get_by_repo_name(base.HG_REPO).repo_id
+        ID = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
         response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name='_%s' % ID))
@@ -119,12 +118,12 @@
         try:
             response.mustcontain("repo_1")
         finally:
-            RepoModel().delete(Repository.get_by_repo_name('repo_1'))
-            Session().commit()
+            RepoModel().delete(db.Repository.get_by_repo_name('repo_1'))
+            meta.Session().commit()
 
     def test_index_by_id_git(self):
         self.log_user()
-        ID = Repository.get_by_repo_name(base.GIT_REPO).repo_id
+        ID = db.Repository.get_by_repo_name(base.GIT_REPO).repo_id
         response = self.app.get(base.url(controller='summary',
                                     action='index',
                                     repo_name='_%s' % ID))
@@ -139,9 +138,9 @@
         )
 
     def _enable_stats(self, repo):
-        r = Repository.get_by_repo_name(repo)
+        r = db.Repository.get_by_repo_name(repo)
         r.enable_statistics = True
-        Session().commit()
+        meta.Session().commit()
 
     def test_index_trending(self):
         self.log_user()
--- a/kallithea/tests/models/common.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/common.py	Thu May 27 21:27:37 2021 +0200
@@ -1,6 +1,5 @@
 from kallithea.lib.auth import AuthUser
-from kallithea.model.db import RepoGroup, Repository, User
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user import UserModel
@@ -11,17 +10,17 @@
 
 
 def _destroy_project_tree(test_u1_id):
-    Session.remove()
-    repo_group = RepoGroup.get_by_group_name(group_name='g0')
+    meta.Session.remove()
+    repo_group = db.RepoGroup.get_by_group_name(group_name='g0')
     for el in reversed(repo_group.recursive_groups_and_repos()):
-        if isinstance(el, Repository):
+        if isinstance(el, db.Repository):
             RepoModel().delete(el)
-        elif isinstance(el, RepoGroup):
+        elif isinstance(el, db.RepoGroup):
             RepoGroupModel().delete(el, force_delete=True)
 
-    u = User.get(test_u1_id)
-    Session().delete(u)
-    Session().commit()
+    u = db.User.get(test_u1_id)
+    meta.Session().delete(u)
+    meta.Session().commit()
 
 
 def _create_project_tree():
@@ -70,7 +69,7 @@
 
 
 def expected_count(group_name, objects=False):
-    repo_group = RepoGroup.get_by_group_name(group_name=group_name)
+    repo_group = db.RepoGroup.get_by_group_name(group_name=group_name)
     objs = repo_group.recursive_groups_and_repos()
     if objects:
         return objs
--- a/kallithea/tests/models/test_changeset_status.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_changeset_status.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,14 @@
+from kallithea.model import db
 from kallithea.model.changeset_status import ChangesetStatusModel
-from kallithea.model.db import ChangesetStatus as CS
 from kallithea.tests import base
 
 
+STATUS_UNDER_REVIEW = db.ChangesetStatus.STATUS_UNDER_REVIEW
+STATUS_APPROVED = db.ChangesetStatus.STATUS_APPROVED
+STATUS_REJECTED = db.ChangesetStatus.STATUS_REJECTED
+STATUS_NOT_REVIEWED = db.ChangesetStatus.STATUS_NOT_REVIEWED
+
+
 class CSM(object): # ChangesetStatusMock
 
     def __init__(self, status):
@@ -15,27 +21,27 @@
         self.m = ChangesetStatusModel()
 
     @base.parametrize('name,expected_result,statuses', [
-        ('empty list', CS.STATUS_UNDER_REVIEW, []),
-        ('approve', CS.STATUS_APPROVED, [CSM(CS.STATUS_APPROVED)]),
-        ('approve2', CS.STATUS_APPROVED, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_APPROVED)]),
-        ('approve_reject', CS.STATUS_REJECTED, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_REJECTED)]),
-        ('approve_underreview', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_UNDER_REVIEW)]),
-        ('approve_notreviewed', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_NOT_REVIEWED)]),
-        ('underreview', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_UNDER_REVIEW), CSM(CS.STATUS_UNDER_REVIEW)]),
-        ('reject', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED)]),
-        ('reject_underreview', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED), CSM(CS.STATUS_UNDER_REVIEW)]),
-        ('reject_notreviewed', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED), CSM(CS.STATUS_NOT_REVIEWED)]),
-        ('notreviewed', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_NOT_REVIEWED)]),
-        ('approve_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), None]),
-        ('approve2_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_APPROVED), None]),
-        ('approve_reject_none', CS.STATUS_REJECTED, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_REJECTED), None]),
-        ('approve_underreview_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_UNDER_REVIEW), None]),
-        ('approve_notreviewed_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_APPROVED), CSM(CS.STATUS_NOT_REVIEWED), None]),
-        ('underreview_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_UNDER_REVIEW), CSM(CS.STATUS_UNDER_REVIEW), None]),
-        ('reject_none', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED), None]),
-        ('reject_underreview_none', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED), CSM(CS.STATUS_UNDER_REVIEW), None]),
-        ('reject_notreviewed_none', CS.STATUS_REJECTED, [CSM(CS.STATUS_REJECTED), CSM(CS.STATUS_NOT_REVIEWED), None]),
-        ('notreviewed_none', CS.STATUS_UNDER_REVIEW, [CSM(CS.STATUS_NOT_REVIEWED), None]),
+        ('empty list', STATUS_UNDER_REVIEW, []),
+        ('approve', STATUS_APPROVED, [CSM(STATUS_APPROVED)]),
+        ('approve2', STATUS_APPROVED, [CSM(STATUS_APPROVED), CSM(STATUS_APPROVED)]),
+        ('approve_reject', STATUS_REJECTED, [CSM(STATUS_APPROVED), CSM(STATUS_REJECTED)]),
+        ('approve_underreview', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), CSM(STATUS_UNDER_REVIEW)]),
+        ('approve_notreviewed', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), CSM(STATUS_NOT_REVIEWED)]),
+        ('underreview', STATUS_UNDER_REVIEW, [CSM(STATUS_UNDER_REVIEW), CSM(STATUS_UNDER_REVIEW)]),
+        ('reject', STATUS_REJECTED, [CSM(STATUS_REJECTED)]),
+        ('reject_underreview', STATUS_REJECTED, [CSM(STATUS_REJECTED), CSM(STATUS_UNDER_REVIEW)]),
+        ('reject_notreviewed', STATUS_REJECTED, [CSM(STATUS_REJECTED), CSM(STATUS_NOT_REVIEWED)]),
+        ('notreviewed', STATUS_UNDER_REVIEW, [CSM(STATUS_NOT_REVIEWED)]),
+        ('approve_none', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), None]),
+        ('approve2_none', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), CSM(STATUS_APPROVED), None]),
+        ('approve_reject_none', STATUS_REJECTED, [CSM(STATUS_APPROVED), CSM(STATUS_REJECTED), None]),
+        ('approve_underreview_none', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), CSM(STATUS_UNDER_REVIEW), None]),
+        ('approve_notreviewed_none', STATUS_UNDER_REVIEW, [CSM(STATUS_APPROVED), CSM(STATUS_NOT_REVIEWED), None]),
+        ('underreview_none', STATUS_UNDER_REVIEW, [CSM(STATUS_UNDER_REVIEW), CSM(STATUS_UNDER_REVIEW), None]),
+        ('reject_none', STATUS_REJECTED, [CSM(STATUS_REJECTED), None]),
+        ('reject_underreview_none', STATUS_REJECTED, [CSM(STATUS_REJECTED), CSM(STATUS_UNDER_REVIEW), None]),
+        ('reject_notreviewed_none', STATUS_REJECTED, [CSM(STATUS_REJECTED), CSM(STATUS_NOT_REVIEWED), None]),
+        ('notreviewed_none', STATUS_UNDER_REVIEW, [CSM(STATUS_NOT_REVIEWED), None]),
     ])
     def test_result(self, name, expected_result, statuses):
         result = self.m._calculate_status(statuses)
--- a/kallithea/tests/models/test_comments.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_comments.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,8 @@
 import pytest
 from tg.util.webtest import test_context
 
+from kallithea.model import db
 from kallithea.model.comment import ChangesetCommentsModel
-from kallithea.model.db import Repository
 from kallithea.tests import base
 
 
@@ -23,7 +23,7 @@
 
     def test_create_delete_general_comment(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
+            repo_id = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
@@ -47,7 +47,7 @@
 
     def test_create_delete_inline_comment(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
+            repo_id = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
@@ -81,7 +81,7 @@
 
     def test_create_delete_multiple_inline_comments(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
+            repo_id = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
@@ -161,7 +161,7 @@
 
     def test_selective_retrieval_of_inline_comments(self):
         with test_context(self.app):
-            repo_id = Repository.get_by_repo_name(base.HG_REPO).repo_id
+            repo_id = db.Repository.get_by_repo_name(base.HG_REPO).repo_id
             revision = '9a7b4ff9e8b40bbda72fc75f162325b9baa45cda'
 
             self._check_comment_count(repo_id, revision,
--- a/kallithea/tests/models/test_diff_parsers.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_diff_parsers.py	Thu May 27 21:27:37 2021 +0200
@@ -268,6 +268,62 @@
           'binary': False,
           'ops': {RENAMED_FILENODE: 'file renamed from oh no to oh yes'}}),
     ],
+    'git_diff_quoting.diff': [
+        ('"foo"',
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ("'foo'",
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ("'foo'" '"foo"',
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('a\r\nb',  # Note: will be parsed correctly, but other parts of Kallithea can't handle it
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('foo\rfoo',  # Note: will be parsed correctly, but other parts of Kallithea can't handle it
+         'added',
+         {'added': 0,
+          'binary': True,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('foo bar',
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('test',
+         'added',
+         {'added': 1,
+          'binary': False,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('esc\033foo',  # Note: will be parsed and handled correctly, but without good UI
+         'added',
+         {'added': 0,
+          'binary': True,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+        ('tab\tfoo',  # Note: will be parsed and handled correctly, but without good UI
+         'added',
+         {'added': 0,
+          'binary': True,
+          'deleted': 0,
+          'ops': {1: 'new file 100644'}}),
+    ],
 }
 
 
@@ -288,27 +344,30 @@
         raw_diff = fixture.load_resource('markuptest.diff', strip=False)
         diff_processor = DiffProcessor(raw_diff)
         chunks = diff_processor.parsed[0]['chunks']
-        assert not chunks[0]
+        assert len(chunks) == 1, chunks
         #from pprint import pprint; pprint(chunks[1])
         l = ['\n']
-        for d in chunks[1]:
+        for d in chunks[0]:
             l.append('%(action)-7s %(new_lineno)3s %(old_lineno)3s %(line)r\n' % d)
         s = ''.join(l)
         assert s == r'''
-context ... ... '@@ -51,6 +51,13 @@\n'
-unmod    51  51 '<u>\t</u>begin();\n'
+context         '@@ -51,8 +51,15 @@'
+unmod    51  51 '<u>\t</u>begin();'
 unmod    52  52 '<u>\t</u><i></i>'
-add      53     '<u>\t</u>int foo;<u class="cr"></u>\n'
-add      54     '<u>\t</u>int bar; <u class="cr"></u>\n'
-add      55     '<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
+add      53     '<u>\t</u>int foo;<u class="cr"></u>'
+add      54     '<u>\t</u>int bar; <u class="cr"></u>'
+add      55     '<u>\t</u>int baz;<u>\t</u><u class="cr"></u>'
 add      56     '<u>\t</u>int space; <i></i>'
 add      57     '<u>\t</u>int tab;<u>\t</u><i></i>'
 add      58     '<u>\t</u><i></i>'
 unmod    59  53 ' <i></i>'
-del          54 '<u>\t</u>#define MAX_STEPS (48)\n'
-add      60     '<u>\t</u><u class="cr"></u>\n'
-add      61     '<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
-unmod    62  55 '\n'
-del          56 '<u>\t</u>#define MIN_STEPS (<del>48</del>)\n'
-add      63     '<u>\t</u>#define MIN_STEPS (<ins>42</ins>)\n'
+del          54 '<u>\t</u>#define MAX_STEPS (48)'
+add      60     '<u>\t</u><u class="cr"></u>'
+add      61     '<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>'
+unmod    62  55 ''
+del          56 '<u>\t</u>#define MIN_STEPS (<del>48</del>)'
+add      63     '<u>\t</u>#define MIN_STEPS (<ins>42</ins>)'
+unmod    64  57 ''
+del          58 '<u>\t</u>#define <del>MORE_STEPS</del><u>\t</u><del>+</del>(<del>48</del>)<del><u>\t</u></del><del><i></i></del>'
+add      65     '<u>\t</u>#define <ins>LESS_STEPS</ins><u>\t</u>(<ins>42</ins>)<ins> <i></i></ins>'
 '''
--- a/kallithea/tests/models/test_dump_html_mails.ref.html	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_dump_html_mails.ref.html	Thu May 27 21:27:37 2021 +0200
@@ -7,7 +7,7 @@
 <pre>
 From: u1 u1 <name@example.com>
 To: u2@example.com
-Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
+Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch by u2
 </pre>
 <hr/>
 <pre>http://comment.org
@@ -166,7 +166,7 @@
 <pre>
 From: u1 u1 <name@example.com>
 To: u2@example.com
-Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
+Subject: [Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch by u2
 </pre>
 <hr/>
 <pre>http://comment.org
@@ -325,7 +325,7 @@
 <pre>
 From: u1 u1 <name@example.com>
 To: u2@example.com
-Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
+Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch by u2
 </pre>
 <hr/>
 <pre>http://comment.org
@@ -502,7 +502,7 @@
 <pre>
 From: u1 u1 <name@example.com>
 To: u2@example.com
-Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch
+Subject: [Approved: Comment] repo/name changeset cafe1234 "This changeset did something cl..." on brunch by u2
 </pre>
 <hr/>
 <pre>http://comment.org
@@ -882,6 +882,197 @@
 <h1>pull_request, is_mention=False</h1>
 <pre>
 From: u1 u1 <name@example.com>
+To: u1@example.com
+Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
+</pre>
+<hr/>
+<pre>http://pr.org/7
+
+Added as Reviewer of Pull Request #7 "The Title" by Requesting User (root)
+
+
+Pull request #7 "The Title" by u2 u3 (u2)
+from https://dev.org/repo branch devbranch
+to http://mainline.com/repo branch trunk
+
+
+Description:
+
+This PR is 'awesome' because it does <stuff>
+ - please approve indented!
+
+
+Changesets:
+
+Introduce one and two
+Make one plus two equal tree
+
+
+View Pull Request: http://pr.org/7
+</pre>
+<hr/>
+<!--!doctype html-->
+<!--html lang="en"-->
+<!--head-->
+    <!--title--><!--/title-->
+    <!--meta name="viewport" content="width=device-width"-->
+    <!--meta http-equiv="Content-Type" content="text/html; charset=UTF-8"-->
+<!--/head-->
+<!--body-->
+<table align="center" cellpadding="0" cellspacing="0" border="0" style="min-width:348px;max-width:800px;font-family:Helvetica,Arial,sans-serif;font-weight:200;font-size:14px;line-height:17px;color:#202020">
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td>
+            <table width="100%" cellpadding="0" cellspacing="0" border="0"
+                   style="table-layout:fixed;font-family:Helvetica,Arial,sans-serif;border:1px solid #ddd">
+                <tr><td width="30px" style="width:30px"></td><td></td><td width="30px" style="width:30px"></td></tr>
+                <tr>
+                    <td colspan="3">
+<table bgcolor="#f9f9f9" width="100%" cellpadding="0" cellspacing="0"
+       style="border-bottom:1px solid #ddd">
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td style="font-family:Helvetica,Arial,sans-serif;font-size:19px;line-height:24px">
+            <a style="text-decoration:none;font-weight:600;color:#395fa0" href="http://pr.org/7"
+               target="_blank">Added as Reviewer of Pull Request #7 &#34;The Title&#34; by Requesting User (root)</a>
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+</table>
+                    </td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td></td>
+                    <td>
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+            <div>
+                Pull request
+                <a style="color:#395fa0;text-decoration:none"
+                   href="http://pr.org/7">#7 "The Title"</a>
+                by
+                <span style="border:#ddd 1px solid;background:#f9f9f9">u2 u3 (u2)</span>.
+            </div>
+            <div>
+                from
+                <a style="color:#202020;text-decoration:none;border:#ddd 1px solid;background:#f9f9f9"
+                   href="https://dev.org/repo">https://dev.org/repo</a>
+                branch
+                <span style="border:#ddd 1px solid;background:#f9f9f9">devbranch</span>
+                <br/>
+                to
+                <a style="color:#202020;text-decoration:none;border:#ddd 1px solid;background:#f9f9f9"
+                   href="http://mainline.com/repo">http://mainline.com/repo</a>
+                branch
+                <span style="border:#ddd 1px solid;background:#f9f9f9">trunk</span>
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <div>
+                Description:
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="#f9f9f9" style="border:1px solid #ddd;border-radius:4px">
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td width="20px" style="width:20px"></td>
+                    <td>
+                        <div style="font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace;white-space:pre-wrap"><div class="formatted-fixed">This PR is &#39;awesome&#39; because it does &lt;stuff&gt;<br/> - please approve indented!</div></div>
+                    </td>
+                    <td width="20px" style="width:20px"></td>
+                </tr>
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+    </tr>
+    <tr><td height="15px" style="height:15px"></td></tr>
+    <tr>
+        <td>
+            <div>Changesets:</div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+
+    <tr>
+        <td style="font-family:Helvetica,Arial,sans-serif">
+            <ul style="color:#395fa0;padding-left:15px;margin:0">
+                    <li style="mso-special-format:bullet">
+                        <a style="color:#395fa0;text-decoration:none"
+                           href="http://changeset_home/?repo_name=repo_org&amp;revision=123abc123abc123abc123abc123abc123abc123abc">
+                            Introduce one and two
+                        </a>
+                    </li>
+                    <li style="mso-special-format:bullet">
+                        <a style="color:#395fa0;text-decoration:none"
+                           href="http://changeset_home/?repo_name=repo_org&amp;revision=567fed567fed567fed567fed567fed567fed567fed">
+                            Make one plus two equal tree
+                        </a>
+                    </li>
+            </ul>
+        </td>
+    </tr>
+    <tr>
+        <td>
+<center>
+    <table cellspacing="0" cellpadding="0" style="margin-left:auto;margin-right:auto">
+        <tr>
+            <td height="25px" style="height:25px"></td>
+        </tr>
+        <tr>
+            <td style="border-collapse:collapse;border-radius:2px;text-align:center;display:block;border:solid 1px #395fa0;padding:11px 20px 11px 20px">
+                <a href="http://pr.org/7" style="text-decoration:none;display:block" target="_blank">
+                    <center>
+                        <font size="3">
+                            <span style="font-family:Helvetica,Arial,sans-serif;font-weight:700;font-size:15px;line-height:14px;color:#395fa0;white-space:nowrap;vertical-align:middle">View Pull Request</span>
+                        </font>
+                    </center>
+                </a>
+            </td>
+        </tr>
+    </table>
+</center>
+        </td>
+    </tr>
+</table>
+                    </td>
+                    <td></td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+</table>
+<!--/body-->
+<!--/html-->
+<hr/>
+<hr/>
+<h1>pull_request, is_mention=False</h1>
+<pre>
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
@@ -1073,6 +1264,197 @@
 <h1>pull_request, is_mention=True</h1>
 <pre>
 From: u1 u1 <name@example.com>
+To: u1@example.com
+Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
+</pre>
+<hr/>
+<pre>http://pr.org/7
+
+Mention on Pull Request #7 "The Title" by Requesting User (root)
+
+
+Pull request #7 "The Title" by u2 u3 (u2)
+from https://dev.org/repo branch devbranch
+to http://mainline.com/repo branch trunk
+
+
+Description:
+
+This PR is 'awesome' because it does <stuff>
+ - please approve indented!
+
+
+Changesets:
+
+Introduce one and two
+Make one plus two equal tree
+
+
+View Pull Request: http://pr.org/7
+</pre>
+<hr/>
+<!--!doctype html-->
+<!--html lang="en"-->
+<!--head-->
+    <!--title--><!--/title-->
+    <!--meta name="viewport" content="width=device-width"-->
+    <!--meta http-equiv="Content-Type" content="text/html; charset=UTF-8"-->
+<!--/head-->
+<!--body-->
+<table align="center" cellpadding="0" cellspacing="0" border="0" style="min-width:348px;max-width:800px;font-family:Helvetica,Arial,sans-serif;font-weight:200;font-size:14px;line-height:17px;color:#202020">
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td>
+            <table width="100%" cellpadding="0" cellspacing="0" border="0"
+                   style="table-layout:fixed;font-family:Helvetica,Arial,sans-serif;border:1px solid #ddd">
+                <tr><td width="30px" style="width:30px"></td><td></td><td width="30px" style="width:30px"></td></tr>
+                <tr>
+                    <td colspan="3">
+<table bgcolor="#f9f9f9" width="100%" cellpadding="0" cellspacing="0"
+       style="border-bottom:1px solid #ddd">
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+    <tr>
+        <td width="30px" style="width:30px"></td>
+        <td style="font-family:Helvetica,Arial,sans-serif;font-size:19px;line-height:24px">
+            <a style="text-decoration:none;font-weight:600;color:#395fa0" href="http://pr.org/7"
+               target="_blank">Mention on Pull Request #7 &#34;The Title&#34; by Requesting User (root)</a>
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+    <tr>
+        <td height="20px" style="height:20px" colspan="3"></td>
+    </tr>
+</table>
+                    </td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td></td>
+                    <td>
+<table cellpadding="0" cellspacing="0" border="0" width="100%">
+    <tr>
+        <td>
+            <div>
+                Pull request
+                <a style="color:#395fa0;text-decoration:none"
+                   href="http://pr.org/7">#7 "The Title"</a>
+                by
+                <span style="border:#ddd 1px solid;background:#f9f9f9">u2 u3 (u2)</span>.
+            </div>
+            <div>
+                from
+                <a style="color:#202020;text-decoration:none;border:#ddd 1px solid;background:#f9f9f9"
+                   href="https://dev.org/repo">https://dev.org/repo</a>
+                branch
+                <span style="border:#ddd 1px solid;background:#f9f9f9">devbranch</span>
+                <br/>
+                to
+                <a style="color:#202020;text-decoration:none;border:#ddd 1px solid;background:#f9f9f9"
+                   href="http://mainline.com/repo">http://mainline.com/repo</a>
+                branch
+                <span style="border:#ddd 1px solid;background:#f9f9f9">trunk</span>
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <div>
+                Description:
+            </div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+    <tr>
+        <td>
+            <table cellpadding="0" cellspacing="0" width="100%" border="0" bgcolor="#f9f9f9" style="border:1px solid #ddd;border-radius:4px">
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+                <tr>
+                    <td width="20px" style="width:20px"></td>
+                    <td>
+                        <div style="font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace;white-space:pre-wrap"><div class="formatted-fixed">This PR is &#39;awesome&#39; because it does &lt;stuff&gt;<br/> - please approve indented!</div></div>
+                    </td>
+                    <td width="20px" style="width:20px"></td>
+                </tr>
+                <tr>
+                    <td height="10px" style="height:10px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+    </tr>
+    <tr><td height="15px" style="height:15px"></td></tr>
+    <tr>
+        <td>
+            <div>Changesets:</div>
+        </td>
+    </tr>
+    <tr><td height="10px" style="height:10px"></td></tr>
+
+    <tr>
+        <td style="font-family:Helvetica,Arial,sans-serif">
+            <ul style="color:#395fa0;padding-left:15px;margin:0">
+                    <li style="mso-special-format:bullet">
+                        <a style="color:#395fa0;text-decoration:none"
+                           href="http://changeset_home/?repo_name=repo_org&amp;revision=123abc123abc123abc123abc123abc123abc123abc">
+                            Introduce one and two
+                        </a>
+                    </li>
+                    <li style="mso-special-format:bullet">
+                        <a style="color:#395fa0;text-decoration:none"
+                           href="http://changeset_home/?repo_name=repo_org&amp;revision=567fed567fed567fed567fed567fed567fed567fed">
+                            Make one plus two equal tree
+                        </a>
+                    </li>
+            </ul>
+        </td>
+    </tr>
+    <tr>
+        <td>
+<center>
+    <table cellspacing="0" cellpadding="0" style="margin-left:auto;margin-right:auto">
+        <tr>
+            <td height="25px" style="height:25px"></td>
+        </tr>
+        <tr>
+            <td style="border-collapse:collapse;border-radius:2px;text-align:center;display:block;border:solid 1px #395fa0;padding:11px 20px 11px 20px">
+                <a href="http://pr.org/7" style="text-decoration:none;display:block" target="_blank">
+                    <center>
+                        <font size="3">
+                            <span style="font-family:Helvetica,Arial,sans-serif;font-weight:700;font-size:15px;line-height:14px;color:#395fa0;white-space:nowrap;vertical-align:middle">View Pull Request</span>
+                        </font>
+                    </center>
+                </a>
+            </td>
+        </tr>
+    </table>
+</center>
+        </td>
+    </tr>
+</table>
+                    </td>
+                    <td></td>
+                </tr>
+                <tr>
+                    <td height="30px" style="height:30px" colspan="3"></td>
+                </tr>
+            </table>
+        </td>
+        <td width="30px" style="width:30px"></td>
+    </tr>
+</table>
+<!--/body-->
+<!--/html-->
+<hr/>
+<hr/>
+<h1>pull_request, is_mention=True</h1>
+<pre>
+From: u1 u1 <name@example.com>
 To: u2@example.com
 Subject: [Review] repo/name PR #7 "The Title" from devbranch by u2
 </pre>
--- a/kallithea/tests/models/test_notifications.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_notifications.py	Thu May 27 21:27:37 2021 +0200
@@ -5,10 +5,8 @@
 from tg.util.webtest import test_context
 
 import kallithea.lib.celerylib
-import kallithea.lib.celerylib.tasks
-from kallithea.lib import helpers as h
-from kallithea.model.db import User
-from kallithea.model.meta import Session
+from kallithea.lib import webutils
+from kallithea.model import db, meta
 from kallithea.model.notification import EmailNotificationModel, NotificationModel
 from kallithea.model.user import UserModel
 from kallithea.tests import base
@@ -17,26 +15,26 @@
 class TestNotifications(base.TestController):
 
     def setup_method(self, method):
-        Session.remove()
+        meta.Session.remove()
         u1 = UserModel().create_or_update(username='u1',
                                         password='qweqwe',
                                         email='u1@example.com',
                                         firstname='u1', lastname='u1')
-        Session().commit()
+        meta.Session().commit()
         self.u1 = u1.user_id
 
         u2 = UserModel().create_or_update(username='u2',
                                         password='qweqwe',
                                         email='u2@example.com',
                                         firstname='u2', lastname='u3')
-        Session().commit()
+        meta.Session().commit()
         self.u2 = u2.user_id
 
         u3 = UserModel().create_or_update(username='u3',
                                         password='qweqwe',
                                         email='u3@example.com',
                                         firstname='u3', lastname='u3')
-        Session().commit()
+        meta.Session().commit()
         self.u3 = u3.user_id
 
     def test_create_notification(self):
@@ -49,12 +47,12 @@
                 assert body == "hi there"
                 assert '>hi there<' in html_body
                 assert from_name == 'u1 u1'
-            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
+            with mock.patch.object(kallithea.model.notification, 'send_email', send_email):
                 NotificationModel().create(created_by=self.u1,
-                                                   subject='subj', body='hi there',
+                                                   body='hi there',
                                                    recipients=usrs)
 
-    @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items())))))
+    @mock.patch.object(webutils, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items())))))
     def test_dump_html_mails(self):
         # Exercise all notification types and dump them to one big html file
         l = []
@@ -74,7 +72,7 @@
             l.append('<hr/>\n')
 
         with test_context(self.app):
-            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
+            with mock.patch.object(kallithea.model.notification, 'send_email', send_email):
                 pr_kwargs = dict(
                     pr_nice_id='#7',
                     pr_title='The Title',
@@ -84,7 +82,7 @@
                     pr_target_branch='trunk',
                     pr_source_repo='https://dev.org/repo',
                     pr_source_branch='devbranch',
-                    pr_owner=User.get(self.u2),
+                    pr_owner=db.User.get(self.u2),
                     pr_owner_username='u2'
                     )
 
@@ -103,7 +101,8 @@
                             status_change=[None, 'Approved'],
                             cs_target_repo='http://example.com/repo_target',
                             cs_url='http://changeset.com',
-                            cs_author=User.get(self.u2))),
+                            cs_author_username=db.User.get(self.u2).username,
+                            cs_author=db.User.get(self.u2))),
                         (NotificationModel.TYPE_MESSAGE,
                          'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.',
                          dict()),
@@ -149,17 +148,17 @@
                     for desc, type_, body, kwargs in params:
                         # desc is used as "global" variable
                         NotificationModel().create(created_by=self.u1,
-                                                           subject='unused', body=body, email_kwargs=kwargs,
+                                                           body=body, email_kwargs=kwargs,
                                                            recipients=[self.u2], type_=type_)
 
                 # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly:
                 desc = 'TYPE_PASSWORD_RESET'
                 kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746')
-                kallithea.lib.celerylib.tasks.send_email(['john@doe.com'],
+                kallithea.model.notification.send_email(['john@doe.com'],
                     "Password reset link",
                     EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs),
                     EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs),
-                    from_name=User.get(self.u1).full_name_or_username)
+                    from_name=db.User.get(self.u1).full_name_or_username)
 
         out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \
             re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l))
--- a/kallithea/tests/models/test_permissions.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_permissions.py	Thu May 27 21:27:37 2021 +0200
@@ -1,7 +1,6 @@
+import kallithea
 from kallithea.lib.auth import AuthUser
-from kallithea.model import db
-from kallithea.model.db import Permission, User, UserGroupRepoGroupToPerm, UserToPerm
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.permission import PermissionModel
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
@@ -19,9 +18,9 @@
     @classmethod
     def setup_class(cls):
         # recreate default user to get a clean start
-        PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME,
+        PermissionModel().create_default_permissions(user=db.User.DEFAULT_USER_NAME,
                                                      force=True)
-        Session().commit()
+        meta.Session().commit()
 
     def setup_method(self, method):
         self.u1 = UserModel().create_or_update(
@@ -36,12 +35,12 @@
             username='u3', password='qweqwe',
             email='u3@example.com', firstname='u3', lastname='u3'
         )
-        self.anon = User.get_default_user()
+        self.anon = db.User.get_default_user()
         self.a1 = UserModel().create_or_update(
             username='a1', password='qweqwe',
             email='a1@example.com', firstname='a1', lastname='a1', admin=True
         )
-        Session().commit()
+        meta.Session().commit()
 
     def teardown_method(self, method):
         if hasattr(self, 'test_repo'):
@@ -52,7 +51,7 @@
         UserModel().delete(self.u3)
         UserModel().delete(self.a1)
 
-        Session().commit() # commit early to avoid SQLAlchemy warning from double cascade delete to users_groups_members
+        meta.Session().commit() # commit early to avoid SQLAlchemy warning from double cascade delete to users_groups_members
 
         if hasattr(self, 'g1'):
             RepoGroupModel().delete(self.g1.group_id)
@@ -64,47 +63,47 @@
         if hasattr(self, 'ug1'):
             UserGroupModel().delete(self.ug1, force=True)
 
-        Session().commit()
+        meta.Session().commit()
 
     def test_default_perms_set(self):
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.read'
+        assert u1_auth.repository_permissions[base.HG_REPO] == 'repository.read'
         new_perm = 'repository.write'
         RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1,
                                           perm=new_perm)
-        Session().commit()
+        meta.Session().commit()
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm
+        assert u1_auth.repository_permissions[base.HG_REPO] == new_perm
 
     def test_default_admin_perms_set(self):
         a1_auth = AuthUser(user_id=self.a1.user_id)
-        assert a1_auth.permissions['repositories'][base.HG_REPO] == 'repository.admin'
+        assert a1_auth.repository_permissions[base.HG_REPO] == 'repository.admin'
         new_perm = 'repository.write'
         RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.a1,
                                           perm=new_perm)
-        Session().commit()
+        meta.Session().commit()
         # cannot really downgrade admin permissions!? they still get set as
         # admin!
         u1_auth = AuthUser(user_id=self.a1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.admin'
+        assert u1_auth.repository_permissions[base.HG_REPO] == 'repository.admin'
 
     def test_default_group_perms(self):
         self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
         self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.read'
-        assert u1_auth.permissions['repositories_groups'].get('test1') == 'group.read'
-        assert u1_auth.permissions['repositories_groups'].get('test2') == 'group.read'
-        assert u1_auth.permissions['global'] == set(Permission.DEFAULT_USER_PERMISSIONS)
+        assert u1_auth.repository_permissions[base.HG_REPO] == 'repository.read'
+        assert u1_auth.repository_group_permissions.get('test1') == 'group.read'
+        assert u1_auth.repository_group_permissions.get('test2') == 'group.read'
+        assert u1_auth.global_permissions == set(db.Permission.DEFAULT_USER_PERMISSIONS)
 
     def test_default_admin_group_perms(self):
         self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
         self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
         a1_auth = AuthUser(user_id=self.a1.user_id)
-        assert a1_auth.permissions['repositories'][base.HG_REPO] == 'repository.admin'
-        assert a1_auth.permissions['repositories_groups'].get('test1') == 'group.admin'
-        assert a1_auth.permissions['repositories_groups'].get('test2') == 'group.admin'
+        assert a1_auth.repository_permissions[base.HG_REPO] == 'repository.admin'
+        assert a1_auth.repository_group_permissions.get('test1') == 'group.admin'
+        assert a1_auth.repository_group_permissions.get('test2') == 'group.admin'
 
     def test_propagated_permission_from_users_group_by_explicit_perms_exist(self):
         # make group
@@ -113,9 +112,9 @@
 
         # set user permission none
         RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1, perm='repository.none')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.read' # inherit from default user
+        assert u1_auth.repository_permissions[base.HG_REPO] == 'repository.read' # inherit from default user
 
         # grant perm for group this should override permission from user
         RepoModel().grant_user_group_permission(repo=base.HG_REPO,
@@ -124,7 +123,7 @@
 
         # verify that user group permissions win
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == 'repository.write'
+        assert u1_auth.repository_permissions[base.HG_REPO] == 'repository.write'
 
     def test_propagated_permission_from_users_group(self):
         # make group
@@ -138,7 +137,7 @@
                                                  perm=new_perm_gr)
         # check perms
         u3_auth = AuthUser(user_id=self.u3.user_id)
-        assert u3_auth.permissions['repositories'][base.HG_REPO] == new_perm_gr
+        assert u3_auth.repository_permissions[base.HG_REPO] == new_perm_gr
 
     def test_propagated_permission_from_users_group_lower_weight(self):
         # make group
@@ -150,9 +149,9 @@
         new_perm_h = 'repository.write'
         RepoModel().grant_user_permission(repo=base.HG_REPO, user=self.u1,
                                           perm=new_perm_h)
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm_h
+        assert u1_auth.repository_permissions[base.HG_REPO] == new_perm_h
 
         # grant perm for group this should NOT override permission from user
         # since it's lower than granted
@@ -162,19 +161,19 @@
                                                  perm=new_perm_l)
         # check perms
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories'][base.HG_REPO] == new_perm_h
+        assert u1_auth.repository_permissions[base.HG_REPO] == new_perm_h
 
     def test_repo_in_group_permissions(self):
         self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
         self.g2 = fixture.create_repo_group('group2', skip_if_exists=True)
         # both perms should be read !
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.read'
-        assert u1_auth.permissions['repositories_groups'].get('group2') == 'group.read'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.read'
+        assert u1_auth.repository_group_permissions.get('group2') == 'group.read'
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.read'
-        assert a1_auth.permissions['repositories_groups'].get('group2') == 'group.read'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.read'
+        assert a1_auth.repository_group_permissions.get('group2') == 'group.read'
 
         # Change perms to none for both groups
         RepoGroupModel().grant_user_permission(repo_group=self.g1,
@@ -185,54 +184,54 @@
                                                perm='group.none')
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert u1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group2') == 'group.none'
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert a1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group2') == 'group.none'
 
         # add repo to group
-        name = db.URL_SEP.join([self.g1.group_name, 'test_perm'])
+        name = kallithea.URL_SEP.join([self.g1.group_name, 'test_perm'])
         self.test_repo = fixture.create_repo(name=name,
                                              repo_type='hg',
                                              repo_group=self.g1,
-                                             cur_user=self.u1,)
+                                             cur_user=self.u1.username)
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert u1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group2') == 'group.none'
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert a1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group2') == 'group.none'
 
         # grant permission for u2 !
         RepoGroupModel().grant_user_permission(repo_group=self.g1, user=self.u2,
                                                perm='group.read')
         RepoGroupModel().grant_user_permission(repo_group=self.g2, user=self.u2,
                                                perm='group.read')
-        Session().commit()
+        meta.Session().commit()
         assert self.u1 != self.u2
         # u1 and anon should not have changed perms, while u2 should!
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert u1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group2') == 'group.none'
 
         u2_auth = AuthUser(user_id=self.u2.user_id)
-        assert u2_auth.permissions['repositories_groups'].get('group1') == 'group.read'
-        assert u2_auth.permissions['repositories_groups'].get('group2') == 'group.read'
+        assert u2_auth.repository_group_permissions.get('group1') == 'group.read'
+        assert u2_auth.repository_group_permissions.get('group2') == 'group.read'
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
-        assert a1_auth.permissions['repositories_groups'].get('group2') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group2') == 'group.none'
 
     def test_repo_group_user_as_user_group_member(self):
         # create Group1
         self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
         a1_auth = AuthUser(user_id=self.anon.user_id)
 
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.read'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.read'
 
         # set default permission to none
         RepoGroupModel().grant_user_permission(repo_group=self.g1,
@@ -242,7 +241,7 @@
         self.ug1 = fixture.create_user_group('G1')
         # add user to group
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
-        Session().commit()
+        meta.Session().commit()
 
         # check if user is in the group
         members = [x.user_id for x in UserGroupModel().get(self.ug1.users_group_id).members]
@@ -251,29 +250,29 @@
 
         # check his permissions
         a1_auth = AuthUser(user_id=self.anon.user_id)
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.none'
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.none'
 
         # grant ug1 read permissions for
         RepoGroupModel().grant_user_group_permission(repo_group=self.g1,
                                                       group_name=self.ug1,
                                                       perm='group.read')
-        Session().commit()
+        meta.Session().commit()
         # check if the user group permission is now set on the repo group
-        obj = Session().query(UserGroupRepoGroupToPerm) \
-            .filter(UserGroupRepoGroupToPerm.group == self.g1) \
-            .filter(UserGroupRepoGroupToPerm.users_group == self.ug1) \
+        obj = meta.Session().query(db.UserGroupRepoGroupToPerm) \
+            .filter(db.UserGroupRepoGroupToPerm.group == self.g1) \
+            .filter(db.UserGroupRepoGroupToPerm.users_group == self.ug1) \
             .scalar()
         assert obj.permission.permission_name == 'group.read'
 
         a1_auth = AuthUser(user_id=self.anon.user_id)
 
-        assert a1_auth.permissions['repositories_groups'].get('group1') == 'group.none'
+        assert a1_auth.repository_group_permissions.get('group1') == 'group.none'
 
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.read'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.read'
 
     def test_inherit_nice_permissions_from_default_user(self):
         user_model = UserModel()
@@ -283,14 +282,14 @@
         user_model.grant_perm(usr, 'hg.create.repository')
         user_model.revoke_perm(usr, 'hg.fork.none')
         user_model.grant_perm(usr, 'hg.fork.repository')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
         # this user will have inherited permissions from default user
-        assert u1_auth.permissions['global'] == set(['hg.create.repository', 'hg.fork.repository',
+        assert u1_auth.global_permissions == set(['hg.create.repository', 'hg.fork.repository',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
-                              'usergroup.read', 'hg.create.write_on_repogroup.true'])
+                              'usergroup.read'])
 
     def test_inherit_sad_permissions_from_default_user(self):
         user_model = UserModel()
@@ -300,14 +299,14 @@
         user_model.grant_perm(usr, 'hg.create.none')
         user_model.revoke_perm(usr, 'hg.fork.repository')
         user_model.grant_perm(usr, 'hg.fork.none')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
         # this user will have inherited permissions from default user
-        assert u1_auth.permissions['global'] == set(['hg.create.none', 'hg.fork.none',
+        assert u1_auth.global_permissions == set(['hg.create.none', 'hg.fork.none',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
-                              'usergroup.read', 'hg.create.write_on_repogroup.true'])
+                              'usergroup.read'])
 
     def test_inherit_more_permissions_from_default_user(self):
         user_model = UserModel()
@@ -324,16 +323,16 @@
         user_model.revoke_perm(self.u1, 'hg.fork.repository')
         user_model.grant_perm(self.u1, 'hg.fork.none')
 
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
         # this user will have inherited more permissions from default user
-        assert u1_auth.permissions['global'] == set([
+        assert u1_auth.global_permissions == set([
                               'hg.create.repository',
                               'hg.fork.repository',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
-                              'usergroup.read', 'hg.create.write_on_repogroup.true'])
+                              'usergroup.read'])
 
     def test_inherit_less_permissions_from_default_user(self):
         user_model = UserModel()
@@ -350,16 +349,16 @@
         user_model.revoke_perm(self.u1, 'hg.fork.none')
         user_model.grant_perm(self.u1, 'hg.fork.repository')
 
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
         # this user will have inherited less permissions from default user
-        assert u1_auth.permissions['global'] == set([
+        assert u1_auth.global_permissions == set([
                               'hg.create.repository',
                               'hg.fork.repository',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
-                              'usergroup.read', 'hg.create.write_on_repogroup.true'])
+                              'usergroup.read'])
 
     def test_inactive_user_group_does_not_affect_global_permissions(self):
         # Add user to inactive user group, set specific permissions on user
@@ -383,15 +382,15 @@
         user_model.revoke_perm(usr, 'hg.fork.repository')
         user_model.grant_perm(usr, 'hg.fork.none')
 
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
 
-        assert u1_auth.permissions['global'] == set(['hg.create.none', 'hg.fork.none',
+        assert u1_auth.global_permissions == set(['hg.create.none', 'hg.fork.none',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
                               'usergroup.read',
-                              'hg.create.write_on_repogroup.true'])
+                              ])
 
     def test_inactive_user_group_does_not_affect_global_permissions_inverse(self):
         # Add user to inactive user group, set specific permissions on user
@@ -415,15 +414,15 @@
         user_model.revoke_perm(usr, 'hg.fork.none')
         user_model.grant_perm(usr, 'hg.fork.repository')
 
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
 
-        assert u1_auth.permissions['global'] == set(['hg.create.repository', 'hg.fork.repository',
+        assert u1_auth.global_permissions == set(['hg.create.repository', 'hg.fork.repository',
                               'hg.register.manual_activate',
                               'hg.extern_activate.auto',
                               'repository.read', 'group.read',
                               'usergroup.read',
-                              'hg.create.write_on_repogroup.true'])
+                              ])
 
     def test_inactive_user_group_does_not_affect_repo_permissions(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -435,7 +434,7 @@
         # admin permissions
         self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
-                                             cur_user=self.u2)
+                                             cur_user=self.u2.username)
 
         # enable admin access for user group on repo
         RepoModel().grant_user_group_permission(self.test_repo,
@@ -445,9 +444,9 @@
         RepoModel().grant_user_permission(self.test_repo,
                                           user='default',
                                           perm='repository.write')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.write'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.write'
 
     def test_inactive_user_group_does_not_affect_repo_permissions_inverse(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -459,7 +458,7 @@
         # admin permissions
         self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
-                                             cur_user=self.u2)
+                                             cur_user=self.u2.username)
 
         # enable only write access for user group on repo
         RepoModel().grant_user_group_permission(self.test_repo,
@@ -469,9 +468,9 @@
         RepoModel().grant_user_permission(self.test_repo,
                                           user='default',
                                           perm='repository.admin')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.admin'
 
     def test_inactive_user_group_does_not_affect_repo_group_permissions(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -489,9 +488,9 @@
         RepoGroupModel().grant_user_permission(self.g1,
                                                user='default',
                                                perm='group.write')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.write'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.write'
 
     def test_inactive_user_group_does_not_affect_repo_group_permissions_inverse(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -509,9 +508,9 @@
         RepoGroupModel().grant_user_permission(self.g1,
                                                user='default',
                                                perm='group.admin')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories_groups'].get('group1') == 'group.admin'
+        assert u1_auth.repository_group_permissions.get('group1') == 'group.admin'
 
     def test_inactive_user_group_does_not_affect_user_group_permissions(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -529,10 +528,10 @@
         UserGroupModel().grant_user_permission(self.ug2,
                                                user='default',
                                                perm='usergroup.write')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['user_groups']['G1'] == 'usergroup.read'
-        assert u1_auth.permissions['user_groups']['G2'] == 'usergroup.write'
+        assert u1_auth.user_group_permissions['G1'] == 'usergroup.read'
+        assert u1_auth.user_group_permissions['G2'] == 'usergroup.write'
 
     def test_inactive_user_group_does_not_affect_user_group_permissions_inverse(self):
         self.ug1 = fixture.create_user_group('G1')
@@ -550,20 +549,20 @@
         UserGroupModel().grant_user_permission(self.ug2,
                                                user='default',
                                                perm='usergroup.admin')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['user_groups']['G1'] == 'usergroup.read'
-        assert u1_auth.permissions['user_groups']['G2'] == 'usergroup.admin'
+        assert u1_auth.user_group_permissions['G1'] == 'usergroup.read'
+        assert u1_auth.user_group_permissions['G2'] == 'usergroup.admin'
 
     def test_owner_permissions_doesnot_get_overwritten_by_group(self):
         # create repo as USER,
         self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
-                                             cur_user=self.u1)
+                                             cur_user=self.u1.username)
 
         # he has permissions of admin as owner
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.admin'
         # set his permission as user group, he should still be admin
         self.ug1 = fixture.create_user_group('G1')
         UserGroupModel().add_user_to_group(self.ug1, self.u1)
@@ -571,31 +570,31 @@
                                                  group_name=self.ug1,
                                                  perm='repository.none')
 
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.admin'
 
     def test_owner_permissions_doesnot_get_overwritten_by_others(self):
         # create repo as USER,
         self.test_repo = fixture.create_repo(name='myownrepo',
                                              repo_type='hg',
-                                             cur_user=self.u1)
+                                             cur_user=self.u1.username)
 
         # he has permissions of admin as owner
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.admin'
         # set his permission as user, he should still be admin
         RepoModel().grant_user_permission(self.test_repo, user=self.u1,
                                           perm='repository.none')
-        Session().commit()
+        meta.Session().commit()
         u1_auth = AuthUser(user_id=self.u1.user_id)
-        assert u1_auth.permissions['repositories']['myownrepo'] == 'repository.admin'
+        assert u1_auth.repository_permissions['myownrepo'] == 'repository.admin'
 
     def _test_def_perm_equal(self, user, change_factor=0):
-        perms = UserToPerm.query() \
-                .filter(UserToPerm.user == user) \
+        perms = db.UserToPerm.query() \
+                .filter(db.UserToPerm.user == user) \
                 .all()
-        assert len(perms) == len(Permission.DEFAULT_USER_PERMISSIONS,)+change_factor, perms
+        assert len(perms) == len(db.Permission.DEFAULT_USER_PERMISSIONS,)+change_factor, perms
 
     def test_set_default_permissions(self):
         PermissionModel().create_default_permissions(user=self.u1)
@@ -605,11 +604,11 @@
         PermissionModel().create_default_permissions(user=self.u1)
         self._test_def_perm_equal(user=self.u1)
         # now we delete one, it should be re-created after another call
-        perms = UserToPerm.query() \
-                .filter(UserToPerm.user == self.u1) \
+        perms = db.UserToPerm.query() \
+                .filter(db.UserToPerm.user == self.u1) \
                 .all()
-        Session().delete(perms[0])
-        Session().commit()
+        meta.Session().delete(perms[0])
+        meta.Session().commit()
 
         self._test_def_perm_equal(user=self.u1, change_factor=-1)
 
@@ -629,18 +628,18 @@
         PermissionModel().create_default_permissions(user=self.u1)
         self._test_def_perm_equal(user=self.u1)
 
-        old = Permission.get_by_key(perm)
-        new = Permission.get_by_key(modify_to)
+        old = db.Permission.get_by_key(perm)
+        new = db.Permission.get_by_key(modify_to)
         assert old is not None
         assert new is not None
 
         # now modify permissions
-        p = UserToPerm.query() \
-                .filter(UserToPerm.user == self.u1) \
-                .filter(UserToPerm.permission == old) \
+        p = db.UserToPerm.query() \
+                .filter(db.UserToPerm.user == self.u1) \
+                .filter(db.UserToPerm.permission == old) \
                 .one()
         p.permission = new
-        Session().commit()
+        meta.Session().commit()
 
         PermissionModel().create_default_permissions(user=self.u1)
         self._test_def_perm_equal(user=self.u1)
--- a/kallithea/tests/models/test_repo_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_repo_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -3,9 +3,8 @@
 import pytest
 from sqlalchemy.exc import IntegrityError
 
-from kallithea.model import db
-from kallithea.model.db import RepoGroup
-from kallithea.model.meta import Session
+import kallithea
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests import base
@@ -43,7 +42,7 @@
         self.g3 = fixture.create_repo_group('test3', skip_if_exists=True)
 
     def teardown_method(self, method):
-        Session.remove()
+        meta.Session.remove()
 
     def __check_path(self, *path):
         """
@@ -58,7 +57,7 @@
 
     def test_create_group(self):
         g = fixture.create_repo_group('newGroup')
-        Session().commit()
+        meta.Session().commit()
         assert g.full_path == 'newGroup'
 
         assert self.__check_path('newGroup')
@@ -66,7 +65,7 @@
         # test_create_same_name_group
         with pytest.raises(IntegrityError):
             fixture.create_repo_group('newGroup')
-        Session().rollback()
+        meta.Session().rollback()
 
     def test_same_subgroup(self):
         sg1 = fixture.create_repo_group('sub1', parent_group_id=self.g1.group_id)
@@ -83,13 +82,13 @@
         sg1 = fixture.create_repo_group('deleteme')
         self.__delete_group(sg1.group_id)
 
-        assert RepoGroup.get(sg1.group_id) is None
+        assert db.RepoGroup.get(sg1.group_id) is None
         assert not self.__check_path('deteteme')
 
         sg1 = fixture.create_repo_group('deleteme', parent_group_id=self.g1.group_id)
         self.__delete_group(sg1.group_id)
 
-        assert RepoGroup.get(sg1.group_id) is None
+        assert db.RepoGroup.get(sg1.group_id) is None
         assert not self.__check_path('test1', 'deteteme')
 
     def test_rename_single_group(self):
@@ -97,7 +96,7 @@
 
         new_sg1 = _update_repo_group(sg1.group_id, 'after')
         assert self.__check_path('after')
-        assert RepoGroup.get_by_group_name('initial') is None
+        assert db.RepoGroup.get_by_group_name('initial') is None
 
     def test_update_group_parent(self):
 
@@ -105,16 +104,16 @@
 
         new_sg1 = _update_repo_group(sg1.group_id, 'after', parent_id=self.g1.group_id)
         assert self.__check_path('test1', 'after')
-        assert RepoGroup.get_by_group_name('test1/initial') is None
+        assert db.RepoGroup.get_by_group_name('test1/initial') is None
 
         new_sg1 = _update_repo_group(sg1.group_id, 'after', parent_id=self.g3.group_id)
         assert self.__check_path('test3', 'after')
-        assert RepoGroup.get_by_group_name('test3/initial') == None
+        assert db.RepoGroup.get_by_group_name('test3/initial') == None
 
         new_sg1 = _update_repo_group(sg1.group_id, 'hello')
         assert self.__check_path('hello')
 
-        assert RepoGroup.get_by_group_name('hello') == new_sg1
+        assert db.RepoGroup.get_by_group_name('hello') == new_sg1
 
     def test_subgrouping_with_repo(self):
 
@@ -126,14 +125,14 @@
         assert r.repo_name == 'john'
         # put repo into group
         r = _update_repo('john', repo_group=g1.group_id)
-        Session().commit()
+        meta.Session().commit()
         assert r.repo_name == 'g1/john'
 
         _update_repo_group(g1.group_id, 'g1', parent_id=g2.group_id)
         assert self.__check_path('g2', 'g1')
 
         # test repo
-        assert r.repo_name == db.URL_SEP.join(['g2', 'g1', r.just_name])
+        assert r.repo_name == kallithea.URL_SEP.join(['g2', 'g1', r.just_name])
 
     def test_move_to_root(self):
         g1 = fixture.create_repo_group('t11')
@@ -143,7 +142,7 @@
         assert self.__check_path('t11', 't22')
 
         g2 = _update_repo_group(g2.group_id, 'g22', parent_id=None)
-        Session().commit()
+        meta.Session().commit()
 
         assert g2.group_name == 'g22'
         # we moved our group out of t11 to '' so its full path should be 'g22'
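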
@@ -160,7 +159,7 @@
 
         ## rename L1 all groups should be now changed
         _update_repo_group(g1.group_id, 'L1_NEW')
-        Session().commit()
+        meta.Session().commit()
         assert g1.full_path == 'L1_NEW'
         assert g2.full_path == 'L1_NEW/L2'
         assert g3.full_path == 'L1_NEW/L2/L3'
@@ -175,7 +174,7 @@
         r = fixture.create_repo('R1/R2/R3/R3_REPO', repo_group=g3.group_id)
         ## rename L1 all groups should be now changed
         _update_repo_group(g1.group_id, 'R1', parent_id=g4.group_id)
-        Session().commit()
+        meta.Session().commit()
         assert g1.full_path == 'R1_NEW/R1'
         assert g2.full_path == 'R1_NEW/R1/R2'
         assert g3.full_path == 'R1_NEW/R1/R2/R3'
@@ -191,7 +190,7 @@
 
         ## rename L1 all groups should be now changed
         _update_repo_group(g1.group_id, 'X1_PRIM', parent_id=g4.group_id)
-        Session().commit()
+        meta.Session().commit()
         assert g1.full_path == 'X1_NEW/X1_PRIM'
         assert g2.full_path == 'X1_NEW/X1_PRIM/X2'
         assert g3.full_path == 'X1_NEW/X1_PRIM/X2/X3'
--- a/kallithea/tests/models/test_repos.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_repos.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,7 @@
 import pytest
 
 from kallithea.lib.exceptions import AttachedForksError
-from kallithea.model.db import Repository
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo import RepoModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -14,68 +13,68 @@
 class TestRepos(base.TestController):
 
     def teardown_method(self, method):
-        Session.remove()
+        meta.Session.remove()
 
     def test_remove_repo(self):
         repo = fixture.create_repo(name='test-repo-1')
-        Session().commit()
+        meta.Session().commit()
 
         RepoModel().delete(repo=repo)
-        Session().commit()
+        meta.Session().commit()
 
-        assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
+        assert db.Repository.get_by_repo_name(repo_name='test-repo-1') is None
 
     def test_remove_repo_repo_raises_exc_when_attached_forks(self):
         repo = fixture.create_repo(name='test-repo-1')
-        Session().commit()
+        meta.Session().commit()
 
         fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
-        Session().commit()
+        meta.Session().commit()
 
         with pytest.raises(AttachedForksError):
             RepoModel().delete(repo=repo)
         # cleanup
         RepoModel().delete(repo='test-repo-fork-1')
         RepoModel().delete(repo='test-repo-1')
-        Session().commit()
+        meta.Session().commit()
 
     def test_remove_repo_delete_forks(self):
         repo = fixture.create_repo(name='test-repo-1')
-        Session().commit()
+        meta.Session().commit()
 
         fork = fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
-        Session().commit()
+        meta.Session().commit()
 
         # fork of fork
         fixture.create_fork(fork.repo_name, 'test-repo-fork-fork-1')
-        Session().commit()
+        meta.Session().commit()
 
         RepoModel().delete(repo=repo, forks='delete')
-        Session().commit()
+        meta.Session().commit()
 
-        assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
-        assert Repository.get_by_repo_name(repo_name='test-repo-fork-1') is None
-        assert Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is None
+        assert db.Repository.get_by_repo_name(repo_name='test-repo-1') is None
+        assert db.Repository.get_by_repo_name(repo_name='test-repo-fork-1') is None
+        assert db.Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is None
 
     def test_remove_repo_detach_forks(self):
         repo = fixture.create_repo(name='test-repo-1')
-        Session().commit()
+        meta.Session().commit()
 
         fork = fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
-        Session().commit()
+        meta.Session().commit()
 
         # fork of fork
         fixture.create_fork(fork.repo_name, 'test-repo-fork-fork-1')
-        Session().commit()
+        meta.Session().commit()
 
         RepoModel().delete(repo=repo, forks='detach')
-        Session().commit()
+        meta.Session().commit()
 
         try:
-            assert Repository.get_by_repo_name(repo_name='test-repo-1') is None
-            assert Repository.get_by_repo_name(repo_name='test-repo-fork-1') is not None
-            assert Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is not None
+            assert db.Repository.get_by_repo_name(repo_name='test-repo-1') is None
+            assert db.Repository.get_by_repo_name(repo_name='test-repo-fork-1') is not None
+            assert db.Repository.get_by_repo_name(repo_name='test-repo-fork-fork-1') is not None
         finally:
             RepoModel().delete(repo='test-repo-fork-fork-1')
             RepoModel().delete(repo='test-repo-fork-1')
-            Session().commit()
+            meta.Session().commit()
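
Note on the fork-handling tests above: RepoModel().delete() refuses to remove a repository with attached forks unless a fork policy is given. A condensed sketch of the two policies, assuming the fixture helpers used throughout these tests (repository names are the same throwaway ones used above):

    from kallithea.model import db, meta
    from kallithea.model.repo import RepoModel
    from kallithea.tests.fixture import Fixture

    fixture = Fixture()

    repo = fixture.create_repo(name='test-repo-1')
    fixture.create_fork(repo.repo_name, 'test-repo-fork-1')
    meta.Session().commit()

    # forks='delete' would remove the whole fork chain as well;
    # forks='detach' keeps the forks as independent repositories.
    RepoModel().delete(repo=repo, forks='detach')
    meta.Session().commit()

    assert db.Repository.get_by_repo_name(repo_name='test-repo-1') is None
    assert db.Repository.get_by_repo_name(repo_name='test-repo-fork-1') is not None
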
--- a/kallithea/tests/models/test_settings.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_settings.py	Thu May 27 21:27:37 2021 +0200
@@ -1,39 +1,38 @@
-from kallithea.model.db import Setting
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 
 
 name = 'spam-setting-name'
 
 
 def test_passing_list_setting_value_results_in_string_valued_setting():
-    assert Setting.get_by_name(name) is None
-    setting = Setting.create_or_update(name, ['spam', 'eggs'])
-    Session().flush() # must flush so we can delete it below
+    assert db.Setting.get_by_name(name) is None
+    setting = db.Setting.create_or_update(name, ['spam', 'eggs'])
+    meta.Session().flush() # must flush so we can delete it below
     try:
-        assert Setting.get_by_name(name) is not None
+        assert db.Setting.get_by_name(name) is not None
         # Quirk: list value is stringified.
-        assert Setting.get_by_name(name).app_settings_value \
+        assert db.Setting.get_by_name(name).app_settings_value \
                == "['spam', 'eggs']"
-        assert Setting.get_by_name(name).app_settings_type == 'unicode'
+        assert db.Setting.get_by_name(name).app_settings_type == 'unicode'
     finally:
-        Session().delete(setting)
+        meta.Session().delete(setting)
 
 
 def test_list_valued_setting_creation_requires_manual_value_formatting():
-    assert Setting.get_by_name(name) is None
+    assert db.Setting.get_by_name(name) is None
     # Quirk: need manual formatting of list setting value.
-    setting = Setting.create_or_update(name, 'spam,eggs', type='list')
-    Session().flush() # must flush so we can delete it below
+    setting = db.Setting.create_or_update(name, 'spam,eggs', type='list')
+    meta.Session().flush() # must flush so we can delete it below
     try:
         assert setting.app_settings_value == ['spam', 'eggs']
     finally:
-        Session().delete(setting)
+        meta.Session().delete(setting)
 
 
 def test_list_valued_setting_update():
-    assert Setting.get_by_name(name) is None
-    setting = Setting.create_or_update(name, 'spam', type='list')
-    Session().flush() # must flush so we can delete it below
+    assert db.Setting.get_by_name(name) is None
+    setting = db.Setting.create_or_update(name, 'spam', type='list')
+    meta.Session().flush() # must flush so we can delete it below
     try:
         assert setting.app_settings_value == ['spam']
         # Assign back setting value.
@@ -43,4 +42,4 @@
         setting.app_settings_value = setting.app_settings_value
         assert setting.app_settings_value == ["[\"['spam']\"]"]
     finally:
-        Session().delete(setting)
+        meta.Session().delete(setting)
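
The settings tests above document a quirk of db.Setting.create_or_update: a Python list value is stored as its stringified repr, while a genuinely list-typed setting must be created from a manually comma-joined string. Condensed directly from the assertions above:

    from kallithea.model import db, meta

    # list value passed as-is: stored as a stringified 'unicode' setting
    s = db.Setting.create_or_update('spam-setting-name', ['spam', 'eggs'])
    meta.Session().flush()
    assert db.Setting.get_by_name('spam-setting-name').app_settings_value == "['spam', 'eggs']"
    assert db.Setting.get_by_name('spam-setting-name').app_settings_type == 'unicode'
    meta.Session().delete(s)
    meta.Session().flush()  # flush the delete before reusing the name

    # list-typed setting: value must be a comma-joined string
    s = db.Setting.create_or_update('spam-setting-name', 'spam,eggs', type='list')
    meta.Session().flush()
    assert s.app_settings_value == ['spam', 'eggs']
    meta.Session().delete(s)
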
--- a/kallithea/tests/models/test_user_group_permissions_on_repo_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_user_group_permissions_on_repo_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -1,7 +1,6 @@
 import functools
 
-from kallithea.model.db import RepoGroup
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user_group import UserGroupModel
 from kallithea.tests.fixture import Fixture
@@ -20,7 +19,7 @@
     """
     Resets all permissions to perm attribute
     """
-    repo_group = RepoGroup.get_by_group_name(group_name=group_name)
+    repo_group = db.RepoGroup.get_by_group_name(group_name=group_name)
     if not repo_group:
         raise Exception('Cannot get group %s' % group_name)
 
@@ -34,20 +33,20 @@
     RepoGroupModel()._update_permissions(repo_group,
                                          perms_updates=perms_updates,
                                          recursive=recursive, check_perms=False)
-    Session().commit()
+    meta.Session().commit()
 
 
 def setup_module():
     global test_u2_id, test_u2_gr_id, _get_repo_perms, _get_group_perms
     test_u2 = _create_project_tree()
-    Session().commit()
+    meta.Session().commit()
     test_u2_id = test_u2.user_id
 
     gr1 = fixture.create_user_group('perms_group_1')
-    Session().commit()
+    meta.Session().commit()
     test_u2_gr_id = gr1.users_group_id
     UserGroupModel().add_user_to_group(gr1, user=test_u2_id)
-    Session().commit()
+    meta.Session().commit()
 
     _get_repo_perms = functools.partial(_get_perms, key='repositories',
                                         test_u1_id=test_u2_id)
--- a/kallithea/tests/models/test_user_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_user_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -1,5 +1,4 @@
-from kallithea.model.db import User, UserGroup
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.user_group import UserGroupModel
 from kallithea.tests import base
 from kallithea.tests.fixture import Fixture
@@ -12,9 +11,9 @@
 
     def teardown_method(self, method):
         # delete all groups
-        for gr in UserGroup.query():
+        for gr in db.UserGroup.query():
             fixture.destroy_user_group(gr)
-        Session().commit()
+        meta.Session().commit()
 
     @base.parametrize('pre_existing,regular_should_be,external_should_be,groups,expected', [
         ([], [], [], [], []),
@@ -28,32 +27,32 @@
     def test_enforce_groups(self, pre_existing, regular_should_be,
                             external_should_be, groups, expected):
         # delete all groups
-        for gr in UserGroup.query():
+        for gr in db.UserGroup.query():
             fixture.destroy_user_group(gr)
-        Session().commit()
+        meta.Session().commit()
 
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         for gr in pre_existing:
             gr = fixture.create_user_group(gr)
-        Session().commit()
+        meta.Session().commit()
 
         # make sure the user is just in those groups
         for gr in regular_should_be:
             gr = fixture.create_user_group(gr)
-            Session().commit()
+            meta.Session().commit()
             UserGroupModel().add_user_to_group(gr, user)
-            Session().commit()
+            meta.Session().commit()
 
         # now special external groups created by auth plugins
         for gr in external_should_be:
             gr = fixture.create_user_group(gr, user_group_data={'extern_type': 'container'})
-            Session().commit()
+            meta.Session().commit()
             UserGroupModel().add_user_to_group(gr, user)
-            Session().commit()
+            meta.Session().commit()
 
         UserGroupModel().enforce_groups(user, groups, 'container')
-        Session().commit()
+        meta.Session().commit()
 
-        user = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
         in_groups = user.group_member
         assert sorted(expected) == sorted(x.users_group.users_group_name for x in in_groups)
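
The enforce_groups test above drives synchronization of externally managed group memberships. A rough usage sketch, with a hypothetical group name, assuming 'container' marks groups owned by an external auth plugin (semantics inferred from the parametrized cases above):

    from kallithea.model import db, meta
    from kallithea.model.user_group import UserGroupModel
    from kallithea.tests import base

    user = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
    # make the user's 'container' group memberships match exactly this list;
    # locally managed groups are expected to be left alone
    UserGroupModel().enforce_groups(user, ['external-grp-1'], 'container')
    meta.Session().commit()

    names = sorted(x.users_group.users_group_name for x in user.group_member)
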
--- a/kallithea/tests/models/test_user_permissions_on_repo_groups.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_user_permissions_on_repo_groups.py	Thu May 27 21:27:37 2021 +0200
@@ -1,8 +1,7 @@
 import functools
 
 import kallithea
-from kallithea.model.db import RepoGroup, Repository
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.tests.models.common import _check_expected_count, _create_project_tree, _destroy_project_tree, _get_perms, check_tree_perms, expected_count
 
@@ -22,7 +21,7 @@
         permissions_setup_func(group_name, perm, recursive,
                                user_id=kallithea.DEFAULT_USER_ID)
 
-    repo_group = RepoGroup.get_by_group_name(group_name=group_name)
+    repo_group = db.RepoGroup.get_by_group_name(group_name=group_name)
     if not repo_group:
         raise Exception('Cannot get group %s' % group_name)
 
@@ -36,13 +35,13 @@
     RepoGroupModel()._update_permissions(repo_group,
                                          perms_updates=perms_updates,
                                          recursive=recursive, check_perms=False)
-    Session().commit()
+    meta.Session().commit()
 
 
 def setup_module():
     global test_u1_id, _get_repo_perms, _get_group_perms
     test_u1 = _create_project_tree()
-    Session().commit()
+    meta.Session().commit()
     test_u1_id = test_u1.user_id
     _get_repo_perms = functools.partial(_get_perms, key='repositories',
                                         test_u1_id=test_u1_id)
@@ -133,7 +132,7 @@
 
     for name, perm in repo_items:
         # default user permissions do not "recurse into" private repos
-        is_private = Repository.get_by_repo_name(name).private
+        is_private = db.Repository.get_by_repo_name(name).private
         check_tree_perms(name, perm, group, 'repository.none' if is_private else 'repository.write')
 
     for name, perm in items:
--- a/kallithea/tests/models/test_user_ssh_keys.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_user_ssh_keys.py	Thu May 27 21:27:37 2021 +0200
@@ -1,4 +1,4 @@
-from kallithea.model.db import UserSshKeys
+from kallithea.model import db
 from kallithea.tests.base import TestController
 from kallithea.tests.fixture import Fixture
 
@@ -11,7 +11,7 @@
 class TestUserSshKeys(TestController):
 
     def test_fingerprint_generation(self):
-        key_model = UserSshKeys()
+        key_model = db.UserSshKeys()
         key_model.public_key = public_key
         expected = 'Ke3oUCNJM87P0jJTb3D+e3shjceP2CqMpQKVd75E9I8'
         assert expected == key_model.fingerprint
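
The expected value above looks like a standard OpenSSH-style SHA-256 fingerprint (unpadded base64 of the SHA-256 digest of the decoded key blob). For reference, a standalone sketch of that computation, independent of the db.UserSshKeys implementation:

    import base64
    import hashlib

    def openssh_sha256_fingerprint(public_key_line):
        # public_key_line: e.g. 'ssh-rsa AAAAB3Nza... comment'
        key_blob = base64.b64decode(public_key_line.split()[1])
        digest = hashlib.sha256(key_blob).digest()
        return base64.b64encode(digest).rstrip(b'=').decode('ascii')
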
--- a/kallithea/tests/models/test_users.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/models/test_users.py	Thu May 27 21:27:37 2021 +0200
@@ -1,7 +1,6 @@
 import pytest
 
-from kallithea.model.db import Permission, User, UserEmailMap, UserGroup, UserGroupMember
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.user import UserModel
 from kallithea.model.user_group import UserGroupModel
 from kallithea.tests import base
@@ -15,90 +14,90 @@
 
     @classmethod
     def setup_class(cls):
-        Session.remove()
+        meta.Session.remove()
 
     def teardown_method(self, method):
-        Session.remove()
+        meta.Session.remove()
 
     def test_create_and_remove(self):
         usr = UserModel().create_or_update(username='test_user',
                                            password='qweqwe',
                                            email='u232@example.com',
                                            firstname='u1', lastname='u1')
-        Session().commit()
-        assert User.get_by_username('test_user') == usr
-        assert User.get_by_username('test_USER', case_insensitive=True) == usr
+        meta.Session().commit()
+        assert db.User.get_by_username('test_user') == usr
+        assert db.User.get_by_username('test_USER', case_insensitive=True) == usr
         # User.get_by_username without an explicit request for case insensitivity
         # will use database case sensitivity. The following will thus return
         # None on e.g. PostgreSQL but find test_user on MySQL - we are
         # fine with leaving that as undefined as long as it doesn't crash.
-        User.get_by_username('test_USER', case_insensitive=False)
+        db.User.get_by_username('test_USER', case_insensitive=False)
 
         # make user group
         user_group = fixture.create_user_group('some_example_group')
-        Session().commit()
+        meta.Session().commit()
 
         UserGroupModel().add_user_to_group(user_group, usr)
-        Session().commit()
+        meta.Session().commit()
 
-        assert UserGroup.get(user_group.users_group_id) == user_group
-        assert UserGroupMember.query().count() == 1
+        assert db.UserGroup.get(user_group.users_group_id) == user_group
+        assert db.UserGroupMember.query().count() == 1
         UserModel().delete(usr.user_id)
-        Session().commit()
+        meta.Session().commit()
 
-        assert UserGroupMember.query().all() == []
+        assert db.UserGroupMember.query().all() == []
 
     def test_additional_email_as_main(self):
         usr = UserModel().create_or_update(username='test_user',
                                            password='qweqwe',
                                      email='main_email@example.com',
                                      firstname='u1', lastname='u1')
-        Session().commit()
+        meta.Session().commit()
 
         with pytest.raises(AttributeError):
-            m = UserEmailMap()
+            m = db.UserEmailMap()
             m.email = 'main_email@example.com'
             m.user = usr
-            Session().add(m)
-            Session().commit()
+            meta.Session().add(m)
+            meta.Session().commit()
 
         UserModel().delete(usr.user_id)
-        Session().commit()
+        meta.Session().commit()
 
     def test_extra_email_map(self):
         usr = UserModel().create_or_update(username='test_user',
                                            password='qweqwe',
                                      email='main_email@example.com',
                                      firstname='u1', lastname='u1')
-        Session().commit()
+        meta.Session().commit()
 
-        m = UserEmailMap()
+        m = db.UserEmailMap()
         m.email = 'main_email2@example.com'
         m.user = usr
-        Session().add(m)
-        Session().commit()
+        meta.Session().add(m)
+        meta.Session().commit()
 
-        u = User.get_by_email(email='MAIN_email@example.com')
+        u = db.User.get_by_email(email='MAIN_email@example.com')
         assert usr.user_id == u.user_id
         assert usr.username == u.username
 
-        u = User.get_by_email(email='main_email@example.com')
+        u = db.User.get_by_email(email='main_email@example.com')
         assert usr.user_id == u.user_id
         assert usr.username == u.username
 
-        u = User.get_by_email(email='main_email2@example.com')
+        u = db.User.get_by_email(email='main_email2@example.com')
         assert usr.user_id == u.user_id
         assert usr.username == u.username
-        u = User.get_by_email(email='main_email3@example.com')
+        u = db.User.get_by_email(email='main_email3@example.com')
         assert u is None
 
-        u = User.get_by_email(email='main_e%ail@example.com')
+        u = db.User.get_by_email(email='main_e%ail@example.com')
         assert u is None
-        u = User.get_by_email(email='main_emai_@example.com')
+        u = db.User.get_by_email(email='main_emai_@example.com')
         assert u is None
 
         UserModel().delete(usr.user_id)
-        Session().commit()
+        meta.Session().commit()
 
 
 class TestUsers(base.TestController):
@@ -110,33 +109,33 @@
                                         firstname='u1', lastname='u1')
 
     def teardown_method(self, method):
-        perm = Permission.query().all()
+        perm = db.Permission.query().all()
         for p in perm:
             UserModel().revoke_perm(self.u1, p)
 
         UserModel().delete(self.u1)
-        Session().commit()
-        Session.remove()
+        meta.Session().commit()
+        meta.Session.remove()
 
     def test_add_perm(self):
-        perm = Permission.query().all()[0]
+        perm = db.Permission.query().all()[0]
         UserModel().grant_perm(self.u1, perm)
-        Session().commit()
+        meta.Session().commit()
         assert UserModel().has_perm(self.u1, perm) == True
 
     def test_has_perm(self):
-        perm = Permission.query().all()
+        perm = db.Permission.query().all()
         for p in perm:
             has_p = UserModel().has_perm(self.u1, p)
             assert False == has_p
 
     def test_revoke_perm(self):
-        perm = Permission.query().all()[0]
+        perm = db.Permission.query().all()[0]
         UserModel().grant_perm(self.u1, perm)
-        Session().commit()
+        meta.Session().commit()
         assert UserModel().has_perm(self.u1, perm) == True
 
         # revoke
         UserModel().revoke_perm(self.u1, perm)
-        Session().commit()
+        meta.Session().commit()
         assert UserModel().has_perm(self.u1, perm) == False
--- a/kallithea/tests/other/test_auth_ldap.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/other/test_auth_ldap.py	Thu May 27 21:27:37 2021 +0200
@@ -3,7 +3,7 @@
 import pytest
 
 from kallithea.lib.auth_modules import auth_ldap, authenticate
-from kallithea.model.db import User
+from kallithea.model import db
 
 
 @pytest.fixture
@@ -35,7 +35,7 @@
     # Arrange test user.
     uniqifier = uuid.uuid4()
     username = 'test-user-{0}'.format(uniqifier)
-    assert User.get_by_username(username) is None
+    assert db.User.get_by_username(username) is None
     user_input = dict(username='test-user-{0}'.format(uniqifier),
                       password='spam password',
                       email='spam-email-{0}'.format(uniqifier),
@@ -72,7 +72,7 @@
     # Arrange test user.
     uniqifier = uuid.uuid4()
     username = 'test-user-{0}'.format(uniqifier)
-    assert User.get_by_username(username) is None
+    assert db.User.get_by_username(username) is None
 
     # Arrange LDAP auth.
     monkeypatch.setattr(auth_ldap, 'AuthLdap', _AuthLdapMock)
@@ -89,7 +89,7 @@
 
     # Verify that authentication created new user with attributes
     # retrieved from LDAP.
-    new_user = User.get_by_username(username)
+    new_user = db.User.get_by_username(username)
     assert new_user is not None
     assert new_user.firstname == 'spam ldap first name'
     assert new_user.lastname == 'spam ldap last name'
@@ -115,7 +115,7 @@
     # Arrange test user.
     uniqifier = uuid.uuid4()
     username = 'test-user-{0}'.format(uniqifier)
-    assert User.get_by_username(username) is None
+    assert db.User.get_by_username(username) is None
 
     # Arrange LDAP auth.
     monkeypatch.setattr(auth_ldap, 'AuthLdap', _AuthLdapNoEmailMock)
@@ -132,7 +132,7 @@
 
     # Verify that authentication created new user with attributes
     # retrieved from LDAP, with email == None.
-    new_user = User.get_by_username(username)
+    new_user = db.User.get_by_username(username)
     assert new_user is not None
     assert new_user.firstname == 'spam ldap first name'
     assert new_user.lastname == 'spam ldap last name'
--- a/kallithea/tests/other/test_libs.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/other/test_libs.py	Thu May 27 21:27:37 2021 +0200
@@ -27,12 +27,18 @@
 
 import datetime
 import hashlib
+import re
 
 import mock
+import routes
+from dateutil import relativedelta
+from tg import request
 from tg.util.webtest import test_context
 
-from kallithea.lib.utils2 import AttributeDict, safe_bytes
-from kallithea.model.db import Repository
+import kallithea.lib.helpers as h
+from kallithea.lib import webutils
+from kallithea.lib.utils2 import AttributeDict, get_clone_url, safe_bytes
+from kallithea.model import db
 from kallithea.tests import base
 
 
@@ -71,26 +77,6 @@
 ]
 
 
-class FakeUrlGenerator(object):
-
-    def __init__(self, current_url=None, default_route=None, **routes):
-        """Initialize using specified 'current' URL template,
-        default route template, and all other aguments describing known
-        routes (format: route=template)"""
-        self.current_url = current_url
-        self.default_route = default_route
-        self.routes = routes
-
-    def __call__(self, route_name, *args, **kwargs):
-        if route_name in self.routes:
-            return self.routes[route_name] % kwargs
-
-        return self.default_route % kwargs
-
-    def current(self, *args, **kwargs):
-        return self.current_url % kwargs
-
-
 class TestLibs(base.TestController):
 
     @base.parametrize('test_url,expected,expected_creds', TEST_URLS)
@@ -119,15 +105,12 @@
                            ('F', False),
                            ('FALSE', False),
                            ('0', False),
-                           ('-1', False),
-                           ('', False)
     ])
-    def test_str2bool(self, str_bool, expected):
-        from kallithea.lib.utils2 import str2bool
-        assert str2bool(str_bool) == expected
+    def test_asbool(self, str_bool, expected):
+        from kallithea.lib.utils2 import asbool
+        assert asbool(str_bool) == expected
 
     def test_mention_extractor(self):
-        from kallithea.lib.utils2 import extract_mentioned_usernames
         sample = (
             "@first hi there @world here's my email username@example.com "
             "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three "
@@ -139,7 +122,7 @@
         expected = set([
             '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john',
             'marian.user', 'marco-polo', 'marco_polo', 'world'])
-        assert expected == set(extract_mentioned_usernames(sample))
+        assert expected == set(webutils.extract_mentioned_usernames(sample))
 
     @base.parametrize('age_args,expected', [
         (dict(), 'just now'),
@@ -158,8 +141,7 @@
         (dict(years= -3, months= -2), '3 years and 2 months ago'),
     ])
     def test_age(self, age_args, expected):
-        from kallithea.lib.utils2 import age
-        from dateutil import relativedelta
+        from kallithea.lib.webutils import age
         with test_context(self.app):
             n = datetime.datetime(year=2012, month=5, day=17)
             delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
@@ -183,8 +165,7 @@
         (dict(years= -4, months= -8), '5 years ago'),
     ])
     def test_age_short(self, age_args, expected):
-        from kallithea.lib.utils2 import age
-        from dateutil import relativedelta
+        from kallithea.lib.webutils import age
         with test_context(self.app):
             n = datetime.datetime(year=2012, month=5, day=17)
             delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
@@ -202,8 +183,7 @@
         (dict(years=1, months=1), 'in 1 year and 1 month')
     ])
     def test_age_in_future(self, age_args, expected):
-        from kallithea.lib.utils2 import age
-        from dateutil import relativedelta
+        from kallithea.lib.webutils import age
         with test_context(self.app):
             n = datetime.datetime(year=2012, month=5, day=17)
             delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
@@ -216,8 +196,7 @@
             "[requires => url] [lang => python] [just a tag]"
             "[,d] [ => ULR ] [obsolete] [desc]]"
         )
-        from kallithea.lib.helpers import urlify_text
-        res = urlify_text(sample, stylize=True)
+        res = webutils.urlify_text(sample, stylize=True)
         assert '<div class="label label-meta" data-tag="tag">tag</div>' in res
         assert '<div class="label label-meta" data-tag="obsolete">obsolete</div>' in res
         assert '<div class="label label-meta" data-tag="stale">stale</div>' in res
@@ -226,7 +205,6 @@
         assert '<div class="label label-meta" data-tag="tag">tag</div>' in res
 
     def test_alternative_gravatar(self):
-        from kallithea.lib.helpers import gravatar_url
         _md5 = lambda s: hashlib.md5(safe_bytes(s)).hexdigest()
 
         # mock tg.tmpl_context
@@ -238,46 +216,44 @@
 
             return _c
 
-        fake_url = FakeUrlGenerator(current_url='https://example.com')
-        with mock.patch('kallithea.config.routing.url', fake_url):
+        with mock.patch('kallithea.lib.webutils.url.current', lambda *a, **b: 'https://example.com'):
             fake = fake_tmpl_context(_url='http://example.com/{email}')
-            with mock.patch('tg.tmpl_context', fake):
-                    from kallithea.config.routing import url
-                    assert url.current() == 'https://example.com'
-                    grav = gravatar_url(email_address='test@example.com', size=24)
+            with mock.patch('kallithea.lib.helpers.c', fake):
+                    assert webutils.url.current() == 'https://example.com'
+                    grav = h.gravatar_url(email_address='test@example.com', size=24)
                     assert grav == 'http://example.com/test@example.com'
 
             fake = fake_tmpl_context(_url='http://example.com/{email}')
-            with mock.patch('tg.tmpl_context', fake):
-                grav = gravatar_url(email_address='test@example.com', size=24)
+            with mock.patch('kallithea.lib.helpers.c', fake):
+                grav = h.gravatar_url(email_address='test@example.com', size=24)
                 assert grav == 'http://example.com/test@example.com'
 
             fake = fake_tmpl_context(_url='http://example.com/{md5email}')
-            with mock.patch('tg.tmpl_context', fake):
+            with mock.patch('kallithea.lib.helpers.c', fake):
                 em = 'test@example.com'
-                grav = gravatar_url(email_address=em, size=24)
+                grav = h.gravatar_url(email_address=em, size=24)
                 assert grav == 'http://example.com/%s' % (_md5(em))
 
             fake = fake_tmpl_context(_url='http://example.com/{md5email}/{size}')
-            with mock.patch('tg.tmpl_context', fake):
+            with mock.patch('kallithea.lib.helpers.c', fake):
                 em = 'test@example.com'
-                grav = gravatar_url(email_address=em, size=24)
+                grav = h.gravatar_url(email_address=em, size=24)
                 assert grav == 'http://example.com/%s/%s' % (_md5(em), 24)
 
             fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}')
-            with mock.patch('tg.tmpl_context', fake):
+            with mock.patch('kallithea.lib.helpers.c', fake):
                 em = 'test@example.com'
-                grav = gravatar_url(email_address=em, size=24)
+                grav = h.gravatar_url(email_address=em, size=24)
                 assert grav == 'https://example.com/%s/%s' % (_md5(em), 24)
 
     @base.parametrize('clone_uri_tmpl,repo_name,username,prefix,expected', [
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '', 'http://vps1:8000/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '', 'http://username@vps1:8000/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '/prefix', 'http://vps1:8000/prefix/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix', 'http://user@vps1:8000/prefix/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix', 'http://username@vps1:8000/prefix/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'),
-        (Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '', 'http://vps1:8000/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '', 'http://username@vps1:8000/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '/prefix', 'http://vps1:8000/prefix/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix', 'http://user@vps1:8000/prefix/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix', 'http://username@vps1:8000/prefix/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'),
+        (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'),
         ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', None, '', 'http://vps1:8000/_23'),
         ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'),
         ('http://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'),
@@ -287,7 +263,6 @@
         ('https://proxy1.example.com/{user}/{repo}', 'group/repo1', 'username', '', 'https://proxy1.example.com/username/group/repo1'),
     ])
     def test_clone_url_generator(self, clone_uri_tmpl, repo_name, username, prefix, expected):
-        from kallithea.lib.utils2 import get_clone_url
         clone_url = get_clone_url(clone_uri_tmpl=clone_uri_tmpl, prefix_url='http://vps1:8000' + prefix,
                                   repo_name=repo_name, repo_id=23, username=username)
         assert clone_url == expected
@@ -298,7 +273,6 @@
 
         :param text:
         """
-        import re
         # quickly change expected url[] into a link
         url_pattern = re.compile(r'(?:url\[)(.+?)(?:\])')
 
@@ -336,10 +310,11 @@
     ])
     def test_urlify_text(self, sample, expected):
         expected = self._quick_url(expected)
-        fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s')
-        with mock.patch('kallithea.config.routing.url', fake_url):
-            from kallithea.lib.helpers import urlify_text
-            assert urlify_text(sample, 'repo_name') == expected
+
+        with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__',
+            lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs,
+        ):
+            assert webutils.urlify_text(sample, 'repo_name') == expected
 
     @base.parametrize('sample,expected,url_', [
       ("",
@@ -391,10 +366,10 @@
     def test_urlify_test(self, sample, expected, url_):
         expected = self._quick_url(expected,
                                    tmpl="""<a href="%s">%s</a>""", url_=url_)
-        fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s')
-        with mock.patch('kallithea.config.routing.url', fake_url):
-            from kallithea.lib.helpers import urlify_text
-            assert urlify_text(sample, 'repo_name', stylize=True) == expected
+        with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__',
+            lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs,
+        ):
+            assert webutils.urlify_text(sample, 'repo_name', stylize=True) == expected
 
     @base.parametrize('sample,expected', [
       ("deadbeefcafe @mention, and http://foo.bar/ yo",
@@ -404,10 +379,10 @@
        """<a class="message-link" href="#the-link"> yo</a>"""),
     ])
     def test_urlify_link(self, sample, expected):
-        fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s')
-        with mock.patch('kallithea.config.routing.url', fake_url):
-            from kallithea.lib.helpers import urlify_text
-            assert urlify_text(sample, 'repo_name', link_='#the-link') == expected
+        with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__',
+            lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs,
+        ):
+            assert webutils.urlify_text(sample, 'repo_name', link_='#the-link') == expected
 
     @base.parametrize('issue_pat,issue_server,issue_sub,sample,expected', [
         (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1',
@@ -488,7 +463,6 @@
             """<a class="issue-tracker-link" href="http://foo/BRU/pullrequest/747/">PR-BRU-747</a>"""),
     ])
     def test_urlify_issues(self, issue_pat, issue_server, issue_sub, sample, expected):
-        from kallithea.lib.helpers import urlify_text
         config_stub = {
             'sqlalchemy.url': 'foo',
             'issue_pat': issue_pat,
@@ -496,9 +470,9 @@
             'issue_sub': issue_sub,
         }
         # force recreation of lazy function
-        with mock.patch('kallithea.lib.helpers._urlify_issues_f', None):
+        with mock.patch('kallithea.lib.webutils._urlify_issues_f', None):
             with mock.patch('kallithea.CONFIG', config_stub):
-                assert urlify_text(sample, 'repo_name') == expected
+                assert webutils.urlify_text(sample, 'repo_name') == expected
 
     @base.parametrize('sample,expected', [
         ('abc X5', 'abc <a class="issue-tracker-link" href="http://main/repo_name/main/5/">#5</a>'),
@@ -510,7 +484,6 @@
         ('issue FAILMORE89', 'issue FAILMORE89'), # no match because absent prefix
     ])
     def test_urlify_issues_multiple_issue_patterns(self, sample, expected):
-        from kallithea.lib.helpers import urlify_text
         config_stub = {
             'sqlalchemy.url': r'foo',
             'issue_pat': r'X(\d+)',
@@ -529,9 +502,9 @@
             'issue_server_link_absent_prefix': r'http://failmore/{repo}/\1',
         }
         # force recreation of lazy function
-        with mock.patch('kallithea.lib.helpers._urlify_issues_f', None):
+        with mock.patch('kallithea.lib.webutils._urlify_issues_f', None):
             with mock.patch('kallithea.CONFIG', config_stub):
-                assert urlify_text(sample, 'repo_name') == expected
+                assert webutils.urlify_text(sample, 'repo_name') == expected
 
     @base.parametrize('test,expected', [
       ("", None),
@@ -555,7 +528,7 @@
       ("_IDa", '_IDa'),
     ])
     def test_fix_repo_id_name(self, test, expected):
-        repo = Repository.get_by_repo_name(base.HG_REPO)
+        repo = db.Repository.get_by_repo_name(base.HG_REPO)
         test = test.replace('ID', str(repo.repo_id))
         expected = expected.replace('NAME', repo.repo_name).replace('ID', str(repo.repo_id))
         from kallithea.lib.utils import fix_repo_id_name
@@ -572,11 +545,7 @@
         ('http://www.example.org/kallithea/repos/', 'abc/xyz/', 'http://www.example.org/kallithea/repos/abc/xyz/'),
     ])
     def test_canonical_url(self, canonical, test, expected):
-        from kallithea.lib.helpers import canonical_url
-        from tg import request
-
         # setup url(), used by canonical_url
-        import routes
         m = routes.Mapper()
         m.connect('about', '/about-page')
         url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'})
@@ -588,7 +557,7 @@
         with test_context(self.app):
             request.environ['routes.url'] = url
             with mock.patch('kallithea.CONFIG', config_mock):
-                assert canonical_url(test) == expected
+                assert webutils.canonical_url(test) == expected
 
     @base.parametrize('canonical,expected', [
         ('http://www.example.org', 'www.example.org'),
@@ -596,11 +565,7 @@
         ('http://www.example.org/kallithea/repos/', 'www.example.org'),
     ])
     def test_canonical_hostname(self, canonical, expected):
-        from kallithea.lib.helpers import canonical_hostname
-        from tg import request
-
         # setup url(), used by canonical_hostname
-        import routes
         m = routes.Mapper()
         url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'})
 
@@ -611,4 +576,4 @@
         with test_context(self.app):
             request.environ['routes.url'] = url
             with mock.patch('kallithea.CONFIG', config_mock):
-                assert canonical_hostname() == expected
+                assert webutils.canonical_hostname() == expected
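
The url-mocking pattern in this file changes from a hand-rolled FakeUrlGenerator patched into kallithea.config.routing to patching kallithea.lib.webutils.UrlGenerator.__call__ directly. A minimal sketch of the new pattern as used inside these tests, reusing the changeset_home template from above:

    import mock

    from kallithea.lib import webutils

    with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__',
                    lambda self, name, **kwargs:
                        dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs):
        # changeset hashes and plain URLs get linkified via the mocked url()
        html = webutils.urlify_text('deadbeefcafe and http://example.com/ yo', 'repo_name')
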
--- a/kallithea/tests/other/test_mail.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/other/test_mail.py	Thu May 27 21:27:37 2021 +0200
@@ -3,14 +3,14 @@
 import mock
 
 import kallithea
-from kallithea.model.db import User
+from kallithea.model import db
 from kallithea.tests import base
 
 
 class smtplib_mock(object):
 
     @classmethod
-    def SMTP(cls, server, port, local_hostname):
+    def SMTP(cls, server, port):
         return smtplib_mock()
 
     def ehlo(self):
@@ -21,12 +21,11 @@
 
     def sendmail(self, sender, dest, msg):
         smtplib_mock.lastsender = sender
-        smtplib_mock.lastdest = dest
+        smtplib_mock.lastdest = set(dest)
         smtplib_mock.lastmsg = msg
-        pass
 
 
-@mock.patch('kallithea.lib.rcmail.smtp_mailer.smtplib', smtplib_mock)
+@mock.patch('kallithea.model.notification.smtplib', smtplib_mock)
 class TestMail(base.TestController):
 
     def test_send_mail_trivial(self):
@@ -41,8 +40,8 @@
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
@@ -65,8 +64,8 @@
             'app_email_from': envelope_from,
             'email_to': email_to,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body)
 
         assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL, email_to])
         assert smtplib_mock.lastsender == envelope_from
@@ -89,8 +88,8 @@
             'app_email_from': envelope_from,
             'email_to': email_to,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body)
 
         assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL] + email_to.split(','))
         assert smtplib_mock.lastsender == envelope_from
@@ -111,8 +110,8 @@
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body)
 
         assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL])
         assert smtplib_mock.lastsender == envelope_from
@@ -128,14 +127,14 @@
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         config_mock = {
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
@@ -152,14 +151,14 @@
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
+        author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN)
 
         config_mock = {
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username)
 
         assert smtplib_mock.lastdest == set(recipients)
         assert smtplib_mock.lastsender == envelope_from
@@ -175,15 +174,15 @@
         subject = 'subject'
         body = 'body'
         html_body = 'html_body'
-        author = User(name='foo', lastname='(fubar) "baz"')
+        author = db.User(name='foo', lastname='(fubar) "baz"')
         headers = {'extra': 'yes'}
 
         config_mock = {
             'smtp_server': mailserver,
             'app_email_from': envelope_from,
         }
-        with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock):
-            kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body,
+        with mock.patch('kallithea.model.notification.config', config_mock):
+            kallithea.model.notification.send_email(recipients, subject, body, html_body,
                                                      from_name=author.full_name_or_username, headers=headers)
 
         assert smtplib_mock.lastdest == set(recipients)
@@ -192,6 +191,6 @@
         assert 'Subject: %s' % subject in smtplib_mock.lastmsg
         assert body in smtplib_mock.lastmsg
         assert html_body in smtplib_mock.lastmsg
-        assert 'Extra: yes' in smtplib_mock.lastmsg
+        assert 'extra: yes' in smtplib_mock.lastmsg
         # verify that the headers dict hasn't been mutated by send_email
         assert headers == {'extra': 'yes'}
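
The patch target above moves with the code: send_email now lives in kallithea.model.notification, and mock.patch has to replace the smtplib name in the module where it is looked up. A minimal, self-contained sketch of the same stubbing pattern, using a hypothetical mailer module rather than Kallithea's own code::

    # --- mailer.py (hypothetical module under test) ---
    import smtplib

    def send(server, sender, recipients, msg):
        s = smtplib.SMTP(server, 25)
        s.sendmail(sender, recipients, msg)
        s.quit()

    # --- test_mailer.py (stubs the 'smtplib' name inside mailer, as the diff above does) ---
    import mock

    import mailer

    class smtplib_stub(object):
        """Stands in for the smtplib module and records the last sendmail() call."""
        last = None

        @classmethod
        def SMTP(cls, server, port):
            return cls()

        def sendmail(self, sender, dest, msg):
            smtplib_stub.last = (sender, set(dest), msg)

        def quit(self):
            pass

    @mock.patch('mailer.smtplib', smtplib_stub)
    def test_send():
        mailer.send('smtp.example.com', 'from@example.com', ['to@example.com'], 'hello')
        sender, dest, msg = smtplib_stub.last
        assert dest == {'to@example.com'}
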
--- a/kallithea/tests/other/test_validators.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/other/test_validators.py	Thu May 27 21:27:37 2021 +0200
@@ -2,8 +2,8 @@
 import formencode
 import pytest
 
+from kallithea.model import meta
 from kallithea.model import validators as v
-from kallithea.model.meta import Session
 from kallithea.model.repo_group import RepoGroupModel
 from kallithea.model.user_group import UserGroupModel
 from kallithea.tests import base
@@ -17,7 +17,7 @@
 class TestRepoGroups(base.TestController):
 
     def teardown_method(self, method):
-        Session.remove()
+        meta.Session.remove()
 
     def test_Message_extractor(self):
         validator = v.ValidUsername()
@@ -60,7 +60,7 @@
 
         gr = fixture.create_user_group('test')
         gr2 = fixture.create_user_group('tes2')
-        Session().commit()
+        meta.Session().commit()
         with pytest.raises(formencode.Invalid):
             validator.to_python('test')
         assert gr.users_group_id is not None
@@ -76,7 +76,7 @@
             validator.to_python('TEST')
         UserGroupModel().delete(gr)
         UserGroupModel().delete(gr2)
-        Session().commit()
+        meta.Session().commit()
 
     def test_ValidRepoGroup(self):
         validator = v.ValidRepoGroup()
--- a/kallithea/tests/other/test_vcs_operations.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/other/test_vcs_operations.py	Thu May 27 21:27:37 2021 +0200
@@ -36,10 +36,9 @@
 
 import pytest
 
-from kallithea import CONFIG
+import kallithea
 from kallithea.lib.utils2 import ascii_bytes, safe_str
-from kallithea.model.db import Repository, Ui, User, UserIpMap, UserLog
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.ssh_key import SshKeyModel
 from kallithea.model.user import UserModel
 from kallithea.tests import base
@@ -69,13 +68,13 @@
 
     @classmethod
     def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR):
-        user = User.get_by_username(username)
+        user = db.User.get_by_username(username)
         if user.ssh_keys:
             ssh_key = user.ssh_keys[0]
         else:
             sshkeymodel = SshKeyModel()
             ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username])
-            Session().commit()
+            meta.Session().commit()
 
         return cls._ssh_param(repo_name, user, ssh_key, client_ip)
 
@@ -101,7 +100,7 @@
         # Specify a custom ssh command on the command line
         return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % (
             client_ip,
-            CONFIG['__file__'],
+            kallithea.CONFIG['__file__'],
             user.user_id,
             ssh_key.user_ssh_key_id,
             repo_name)
@@ -112,7 +111,7 @@
         # Set a custom ssh command in the global environment
         os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % (
             client_ip,
-            CONFIG['__file__'],
+            kallithea.CONFIG['__file__'],
             user.user_id,
             ssh_key.user_ssh_key_id)
         return "ssh://someuser@somehost/%s""" % repo_name
@@ -221,10 +220,10 @@
 
 
 def set_anonymous_access(enable=True):
-    user = User.get_default_user()
+    user = db.User.get_default_user()
     user.active = enable
-    Session().commit()
-    if enable != User.get_default_user().active:
+    meta.Session().commit()
+    if enable != db.User.get_default_user().active:
         raise Exception('Cannot set anonymous access')
 
 
@@ -253,10 +252,10 @@
         yield
         # remove hook
         for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
-            entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
+            entry = db.Ui.get_by_key('hooks', '%s.testhook' % hook)
             if entry:
-                Session().delete(entry)
-        Session().commit()
+                meta.Session().delete(entry)
+        meta.Session().commit()
 
     @pytest.fixture(scope="module")
     def testfork(self):
@@ -308,17 +307,17 @@
         if vt.repo_type == 'git':
             assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
         elif vt.repo_type == 'hg':
-            assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout
+            assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout + stderr
 
     @parametrize_vcs_test
     def test_push_new_repo(self, webserver, vt):
         # Clear the log so we know what is added
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
         # Create an empty server repo using the API
         repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
-        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
+        usr = db.User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
         params = {
             "id": 7,
             "api_key": usr.api_key,
@@ -358,7 +357,8 @@
         # <UserLog('id:new_git_XXX:user_created_repo')>
         # <UserLog('id:new_git_XXX:pull')>
         # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
-        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
+        meta.Session.close()  # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
+        action_parts = [ul.action.split(':', 1) for ul in db.UserLog.query().order_by(db.UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
             ('started_following_repo', 0),
             ('user_created_repo', 0),
@@ -372,8 +372,8 @@
 
     @parametrize_vcs_test
     def test_push_new_file(self, webserver, testfork, vt):
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
@@ -389,27 +389,29 @@
             assert 'Repository size' in stdout
             assert 'Last revision is now' in stdout
 
-        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
+        meta.Session.close()  # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
+        action_parts = [ul.action.split(':', 1) for ul in db.UserLog.query().order_by(db.UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
             [('pull', 0), ('push', 3)]
 
     @parametrize_vcs_test
     def test_pull(self, webserver, testfork, vt):
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
         dest_dir = _get_tmp_dir()
         stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
 
         clone_url = vt.repo_url_param(webserver, vt.repo_name)
         stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
+        meta.Session.close()  # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
 
         if vt.repo_type == 'git':
             assert 'FETCH_HEAD' in stderr
         elif vt.repo_type == 'hg':
             assert 'new changesets' in stdout
 
-        action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
+        action_parts = [ul.action for ul in db.UserLog.query().order_by(db.UserLog.user_log_id)]
         assert action_parts == ['pull']
 
         # Test handling of URLs with extra '/' around repo_name
@@ -426,7 +428,7 @@
             if vt.repo_type == 'git':
                 assert "abort: Access to './%s' denied" % vt.repo_name in stderr
             else:
-                assert "abort: Access to './%s' denied" % vt.repo_name in stdout
+                assert "abort: Access to './%s' denied" % vt.repo_name in stdout + stderr
 
         stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True)
         if vt.repo_type == 'git':
@@ -440,7 +442,7 @@
 
     @parametrize_vcs_test
     def test_push_invalidates_cache(self, webserver, testfork, vt):
-        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
+        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in db.Repository.query().filter(db.Repository.repo_name == testfork[vt.repo_type])]
 
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
@@ -448,12 +450,11 @@
 
         stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
 
-        Session().commit()  # expire test session to make sure SA fetch new Repository instances after last_changeset has been updated server side hook in other process
-
         if vt.repo_type == 'git':
             _check_proper_git_push(stdout, stderr)
 
-        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
+        meta.Session.close()  # expire session to make sure SA fetches new Repository instances after last_changeset has been updated by server side hook in another process
+        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in db.Repository.query().filter(db.Repository.repo_name == testfork[vt.repo_type])]
         assert pre_cached_tip != post_cached_tip
 
     @parametrize_vcs_test_http
@@ -473,8 +474,8 @@
 
     @parametrize_vcs_test
     def test_push_with_readonly_credentials(self, webserver, vt):
-        UserLog.query().delete()
-        Session().commit()
+        db.UserLog.query().delete()
+        meta.Session().commit()
 
         dest_dir = _get_tmp_dir()
         clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS)
@@ -487,7 +488,8 @@
         elif vt.repo_type == 'hg':
             assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout
 
-        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
+        meta.Session.close()  # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
+        action_parts = [ul.action.split(':', 1) for ul in db.UserLog.query().order_by(db.UserLog.user_log_id)]
         assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
             [('pull', 0)]
 
@@ -513,7 +515,7 @@
         try:
             # Add IP constraint that excludes the test context:
             user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
-            Session().commit()
+            meta.Session().commit()
             # IP permissions are cached, need to wait for the cache in the server process to expire
             time.sleep(1.5)
             clone_url = vt.repo_url_param(webserver, vt.repo_name)
@@ -522,12 +524,12 @@
                 # The message apparently changed in Git 1.8.3, so match it loosely.
                 assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
             elif vt.repo_type == 'hg':
-                assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout
+                assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout + stderr
         finally:
             # release IP restrictions
-            for ip in UserIpMap.query():
-                UserIpMap.delete(ip.ip_id)
-            Session().commit()
+            for ip in db.UserIpMap.query():
+                db.UserIpMap.delete(ip.ip_id)
+            meta.Session().commit()
             # IP permissions are cached, need to wait for the cache in the server process to expire
             time.sleep(1.5)
 
@@ -548,23 +550,23 @@
     @parametrize_vcs_test_hg # git hooks don't work like hg hooks
     def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt):
         # set preoutgoing to failing hook (returns True)
-        Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
-        Session().commit()
+        db.Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
+        meta.Session().commit()
         # clone repo
         clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
         dest_dir = _get_tmp_dir()
         stdout, stderr = Command(base.TESTS_TMP_PATH) \
             .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
         if vt.repo_type == 'hg':
-            assert 'preoutgoing.testhook hook failed' in stdout
+            assert 'preoutgoing.testhook hook failed' in stdout + stderr
         elif vt.repo_type == 'git':
             assert 'error: 406' in stderr
 
     @parametrize_vcs_test_hg # git hooks don't work like hg hooks
     def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt):
         # set prechangegroup to failing hook (returns exit code 1)
-        Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
-        Session().commit()
+        db.Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
+        meta.Session().commit()
         # clone repo
         clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
         dest_dir = _get_tmp_dir()
@@ -580,8 +582,8 @@
         assert stdout != ''
 
         # set prechangegroup hook to exception throwing method
-        Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook')
-        Session().commit()
+        db.Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook')
+        meta.Session().commit()
         # re-try to push
         stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
         if vt is HgHttpVcsTest:
@@ -595,8 +597,8 @@
         assert stdout != ''
 
         # set prechangegroup hook to method that returns False
-        Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook')
-        Session().commit()
+        db.Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook')
+        meta.Session().commit()
         # re-try to push
         stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
         assert 'passing_test_hook succeeded' in stdout + stderr
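
The new meta.Session.close() calls above compensate for writes done by the Kallithea hooks in the server process: with MariaDB/MySQL's default REPEATABLE READ isolation, the test's already-open transaction will not see those commits, and the session can also hand back cached instances. Closing the scoped session ends the transaction and clears that cache, so the next query reads fresh rows. A minimal sketch of the pattern with a plain SQLAlchemy scoped_session (the model and engine here are made up, not Kallithea's)::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import scoped_session, sessionmaker

    Base = declarative_base()

    class UserLog(Base):
        __tablename__ = 'user_log'
        user_log_id = Column(Integer, primary_key=True)
        action = Column(String(255))

    engine = create_engine('sqlite://')  # stand-in engine; the isolation issue shows up on MariaDB/MySQL
    Base.metadata.create_all(engine)
    Session = scoped_session(sessionmaker(bind=engine))

    Session().query(UserLog).all()   # opens a transaction in this process

    # ... another process (e.g. a server-side hook) inserts and commits UserLog rows here ...

    Session.close()                  # end the transaction and drop cached instances
    fresh = Session().query(UserLog).order_by(UserLog.user_log_id).all()  # new transaction, fresh SELECT
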
--- a/kallithea/tests/performance/test_vcs.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/performance/test_vcs.py	Thu May 27 21:27:37 2021 +0200
@@ -14,7 +14,8 @@
 
 import pytest
 
-from kallithea.model.db import Repository
+from kallithea.lib.graphmod import graph_data
+from kallithea.model import db
 from kallithea.tests import base
 
 
@@ -23,8 +24,7 @@
 
     def graphmod(self, repo):
         """ Simple test for running the graph_data function for profiling/testing performance. """
-        from kallithea.lib.graphmod import graph_data
-        dbr = Repository.get_by_repo_name(repo)
+        dbr = db.Repository.get_by_repo_name(repo)
         scm_inst = dbr.scm_instance
         collection = scm_inst.get_changesets(start=0, end=None, branch_name=None)
         revs = [x.revision for x in collection]
--- a/kallithea/tests/scripts/manual_test_concurrency.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/scripts/manual_test_concurrency.py	Thu May 27 21:27:37 2021 +0200
@@ -37,17 +37,17 @@
 from paste.deploy import appconfig
 from sqlalchemy import engine_from_config
 
-from kallithea.config.environment import load_environment
-from kallithea.lib.auth import get_crypt_password
-from kallithea.model import meta
+import kallithea.config.application
+from kallithea.lib.utils2 import get_crypt_password
+from kallithea.model import db, meta
 from kallithea.model.base import init_model
-from kallithea.model.db import Repository, Ui, User
+from kallithea.model.repo import RepoModel
 from kallithea.tests.base import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
 
 
 rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__)))))
 conf = appconfig('config:development.ini', relative_to=rel_path)
-load_environment(conf.global_conf, conf.local_conf)
+kallithea.config.application.make_app(conf.global_conf, **conf.local_conf)
 
 USER = TEST_USER_ADMIN_LOGIN
 PASS = TEST_USER_ADMIN_PASS
@@ -88,18 +88,18 @@
     print('creating test user')
     sa = get_session()
 
-    user = sa.query(User).filter(User.username == USER).scalar()
+    user = sa.query(db.User).filter(db.User.username == USER).scalar()
 
     if force and user is not None:
         print('removing current user')
-        for repo in sa.query(Repository).filter(Repository.user == user).all():
+        for repo in sa.query(db.Repository).filter(db.Repository.user == user).all():
             sa.delete(repo)
         sa.delete(user)
         sa.commit()
 
     if user is None or force:
         print('creating new one')
-        new_usr = User()
+        new_usr = db.User()
         new_usr.username = USER
         new_usr.password = get_crypt_password(PASS)
         new_usr.email = 'mail@example.com'
@@ -115,14 +115,13 @@
 
 def create_test_repo(force=True):
     print('creating test repo')
-    from kallithea.model.repo import RepoModel
     sa = get_session()
 
-    user = sa.query(User).filter(User.username == USER).scalar()
+    user = sa.query(db.User).filter(db.User.username == USER).scalar()
     if user is None:
         raise Exception('user not found')
 
-    repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
+    repo = sa.query(db.Repository).filter(db.Repository.repo_name == HG_REPO).scalar()
 
     if repo is None:
         print('repo not found creating')
@@ -140,7 +139,7 @@
 
 def set_anonymous_access(enable=True):
     sa = get_session()
-    user = sa.query(User).filter(User.username == 'default').one()
+    user = sa.query(db.User).filter(db.User.username == 'default').one()
     user.active = enable
     sa.add(user)
     sa.commit()
@@ -148,7 +147,7 @@
 
 def get_anonymous_access():
     sa = get_session()
-    return sa.query(User).filter(User.username == 'default').one().active
+    return sa.query(db.User).filter(db.User.username == 'default').one().active
 
 
 #==============================================================================
@@ -156,7 +155,7 @@
 #==============================================================================
 def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
                                 backend='hg'):
-    cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo)
+    cwd = path = os.path.join(db.Ui.get_by_key('paths', '/').ui_value, repo)
 
     try:
         shutil.rmtree(path, ignore_errors=True)
@@ -200,7 +199,7 @@
             backend = 'hg'
 
         if METHOD == 'pull':
-            seq = next(tempfile._RandomNameSequence())
+            seq = next(tempfile._RandomNameSequence())  # pytype: disable=module-attr
             test_clone_with_credentials(repo=sys.argv[1], method='clone',
                                         backend=backend)
         s = time.time()
--- a/kallithea/tests/scripts/manual_test_crawler.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/scripts/manual_test_crawler.py	Thu May 27 21:27:37 2021 +0200
@@ -70,7 +70,7 @@
 ]
 
 
-cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
+cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'test_cookie.txt'))
 o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
 o.addheaders = [
     ('User-agent', 'kallithea-crawler'),
@@ -83,7 +83,6 @@
 def _get_repo(proj):
     if isinstance(proj, str):
         repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj))
-        proj = proj
     else:
         repo = proj
         proj = repo.name
--- a/kallithea/tests/vcs/__init__.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/vcs/__init__.py	Thu May 27 21:27:37 2021 +0200
@@ -9,54 +9,13 @@
 need to set the correct backend to use by setting the
 ``backend_alias`` property, which should correspond to one of the keys
 from ``vcs.backends.BACKENDS``.
-
-For each SCM we run tests for, we need some repository. We would use
-repositories location provided in test suite defaults - see ``conf``
-module for more detail. We simply try to check if repository at
-certain location exists, if not we would try to fetch them. At
-``test_vcs`` or ``test_common`` we run unit tests common for each
-repository type and for example specific mercurial tests are located
-at ``test_hg`` module.
 """
 
 import os
-import shutil
-
-from kallithea.tests.base import GIT_REMOTE_REPO, HG_REMOTE_REPO, TEST_GIT_REPO, TEST_HG_REPO, TESTS_TMP_PATH
-from kallithea.tests.vcs.utils import SCMFetcher
 
 
 # Base directory for the VCS tests.
 VCS_TEST_MODULE_BASE_DIR = os.path.abspath(os.path.dirname(__file__))
 
 # Path to user configuration file used during tests.
-TEST_USER_CONFIG_FILE = os.path.join(TESTS_TMP_PATH, 'aconfig')
-
-
-def setup_package():
-    """
-    Prepares whole package for tests which mainly means it would try to fetch
-    test repositories or use already existing ones.
-    """
-    fetchers = {
-        'hg': {
-            'alias': 'hg',
-            'test_repo_path': TEST_HG_REPO,
-            'remote_repo': HG_REMOTE_REPO,
-            'clone_cmd': 'hg clone --insecure',
-        },
-        'git': {
-            'alias': 'git',
-            'test_repo_path': TEST_GIT_REPO,
-            'remote_repo': GIT_REMOTE_REPO,
-            'clone_cmd': 'git clone --bare',
-        },
-    }
-
-    for scm, fetcher_info in fetchers.items():
-        fetcher = SCMFetcher(**fetcher_info)
-        fetcher.setup()
-
-    # Copy the test user configuration file to location where
-    # temporary test data is stored at.
-    shutil.copy(os.path.join(VCS_TEST_MODULE_BASE_DIR, 'aconfig'), TEST_USER_CONFIG_FILE)
+TEST_USER_CONFIG_FILE = os.path.join(VCS_TEST_MODULE_BASE_DIR, 'aconfig')
--- a/kallithea/tests/vcs/base.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/vcs/base.py	Thu May 27 21:27:37 2021 +0200
@@ -73,10 +73,13 @@
 
         for commit in cls._get_commits():
             for node in commit.get('added', []):
+                assert isinstance(node, FileNode)
                 cls.imc.add(FileNode(node.path, content=node.content))
             for node in commit.get('changed', []):
+                assert isinstance(node, FileNode)
                 cls.imc.change(FileNode(node.path, content=node.content))
             for node in commit.get('removed', []):
+                assert isinstance(node, FileNode)
                 cls.imc.remove(FileNode(node.path))
 
             cls.tip = cls.imc.commit(message=commit['message'],
--- a/kallithea/tests/vcs/test_git.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/vcs/test_git.py	Thu May 27 21:27:37 2021 +0200
@@ -780,9 +780,11 @@
 
         # Create a dictionary where keys are hook names, and values are paths to
         # them in the non-bare repo. Deduplicates code in tests a bit.
+        self.pre_receive = os.path.join(self.repo.path, '.git', 'hooks', "pre-receive")
+        self.post_receive = os.path.join(self.repo.path, '.git', 'hooks', "post-receive")
         self.kallithea_hooks = {
-            "pre-receive": os.path.join(self.repo.path, '.git', 'hooks', "pre-receive"),
-            "post-receive": os.path.join(self.repo.path, '.git', 'hooks', "post-receive"),
+            "pre-receive": self.pre_receive,
+            "post-receive": self.post_receive,
         }
 
     def test_hooks_created_if_missing(self):
@@ -794,10 +796,10 @@
             if os.path.exists(hook_path):
                 os.remove(hook_path)
 
-        ScmModel().install_git_hooks(repo=self.repo)
+        ScmModel().install_git_hooks(self.repo)
 
-        for hook, hook_path in self.kallithea_hooks.items():
-            assert os.path.exists(hook_path)
+        assert not os.path.exists(self.pre_receive)
+        assert os.path.exists(self.post_receive)
 
     def test_kallithea_hooks_updated(self):
         """
@@ -808,11 +810,11 @@
             with open(hook_path, "w") as f:
                 f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")
 
-        ScmModel().install_git_hooks(repo=self.repo)
+        ScmModel().install_git_hooks(self.repo)
 
-        for hook, hook_path in self.kallithea_hooks.items():
-            with open(hook_path) as f:
-                assert "JUST_BOGUS" not in f.read()
+        assert not os.path.exists(self.pre_receive)
+        with open(self.post_receive) as f:
+            assert "JUST_BOGUS" not in f.read()
 
     def test_custom_hooks_untouched(self):
         """
@@ -823,7 +825,7 @@
             with open(hook_path, "w") as f:
                 f.write("#!/bin/bash\n#CUSTOM_HOOK")
 
-        ScmModel().install_git_hooks(repo=self.repo)
+        ScmModel().install_git_hooks(self.repo)
 
         for hook, hook_path in self.kallithea_hooks.items():
             with open(hook_path) as f:
@@ -838,8 +840,9 @@
             with open(hook_path, "w") as f:
                 f.write("#!/bin/bash\n#CUSTOM_HOOK")
 
-        ScmModel().install_git_hooks(repo=self.repo, force_create=True)
+        ScmModel().install_git_hooks(self.repo, force=True)
 
-        for hook, hook_path in self.kallithea_hooks.items():
-            with open(hook_path) as f:
-                assert "KALLITHEA_HOOK_VER" in f.read()
+        with open(self.pre_receive) as f:
+            assert "KALLITHEA_HOOK_VER" not in f.read()
+        with open(self.post_receive) as f:
+            assert "KALLITHEA_HOOK_VER" in f.read()
--- a/kallithea/tests/vcs/test_repository.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/vcs/test_repository.py	Thu May 27 21:27:37 2021 +0200
@@ -1,3 +1,4 @@
+import copy
 import datetime
 
 import pytest
@@ -31,7 +32,6 @@
         assert self.repo == self.repo
 
     def test_repo_equality_broken_object(self):
-        import copy
         _repo = copy.copy(self.repo)
         delattr(_repo, 'path')
         assert self.repo != _repo
--- a/kallithea/tests/vcs/test_workdirs.py	Sun May 09 08:42:17 2021 +0200
+++ b/kallithea/tests/vcs/test_workdirs.py	Thu May 27 21:27:37 2021 +0200
@@ -2,6 +2,7 @@
 
 import pytest
 
+from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
 from kallithea.lib.vcs.nodes import FileNode
 from kallithea.tests.vcs.base import _BackendTestMixin
 
@@ -67,7 +68,6 @@
         assert self.repo.workdir.get_changeset() == old_head
 
     def test_checkout_branch(self):
-        from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
         # first, 'foobranch' does not exist.
         with pytest.raises(BranchDoesNotExistError):
             self.repo.workdir.checkout_branch(branch='foobranch')
--- a/kallithea/tests/vcs/utils.py	Sun May 09 08:42:17 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,54 +0,0 @@
-"""
-Utilities for tests only. These are not or should not be used normally -
-functions here are crafted as we don't want to use ``vcs`` to verify tests.
-"""
-import os
-import sys
-from subprocess import Popen
-
-
-def run_command(cmd, args):
-    """
-    Runs command on the system with given ``args``.
-    """
-    command = ' '.join((cmd, args))
-    p = Popen(command, shell=True)
-    status = os.waitpid(p.pid, 0)[1]
-    return status
-
-
-def eprint(msg):
-    """
-    Prints given ``msg`` into sys.stderr as nose test runner hides all output
-    from sys.stdout by default and if we want to pipe stream somewhere we don't
-    need those verbose messages anyway.
-    Appends line break.
-    """
-    sys.stderr.write(msg)
-    sys.stderr.write('\n')
-
-
-class SCMFetcher(object):
-
-    def __init__(self, alias, test_repo_path, remote_repo, clone_cmd):
-        """
-        :param clone_cmd: command which would clone remote repository; pass
-          only first bits - remote path and destination would be appended
-          using ``remote_repo`` and ``test_repo_path``
-        """
-        self.alias = alias
-        self.test_repo_path = test_repo_path
-        self.remote_repo = remote_repo
-        self.clone_cmd = clone_cmd
-
-    def setup(self):
-        if not os.path.isdir(self.test_repo_path):
-            self.fetch_repo()
-
-    def fetch_repo(self):
-        """
-        Tries to fetch repository from remote path.
-        """
-        remote = self.remote_repo
-        eprint("Fetching repository %s into %s" % (remote, self.test_repo_path))
-        run_command(self.clone_cmd,  '%s %s' % (remote, self.test_repo_path))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pytype_requirements.txt	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,9 @@
+# Note: we support pytype on Python 3.9
+pytype==2021.4.1
+# optional modules that we want for pytype checking
+ipython
+kajiki
+psycopg2
+python-ldap
+python-pam
+tgext.debugbar
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/deps.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3
+
+
+import re
+import sys
+
+
+ignored_modules = set('''
+argparse
+base64
+bcrypt
+binascii
+bleach
+calendar
+celery
+celery
+chardet
+click
+collections
+configparser
+copy
+csv
+ctypes
+datetime
+dateutil
+decimal
+decorator
+difflib
+distutils
+docutils
+email
+errno
+fileinput
+functools
+getpass
+grp
+hashlib
+hmac
+html
+http
+imp
+importlib
+inspect
+io
+ipaddr
+IPython
+isapi_wsgi
+itertools
+json
+kajiki
+ldap
+logging
+mako
+markdown
+mimetypes
+mock
+msvcrt
+multiprocessing
+operator
+os
+paginate
+paginate_sqlalchemy
+pam
+paste
+pkg_resources
+platform
+posixpath
+pprint
+pwd
+pyflakes
+pytest
+pytest_localserver
+random
+re
+routes
+setuptools
+shlex
+shutil
+smtplib
+socket
+ssl
+stat
+string
+struct
+subprocess
+sys
+tarfile
+tempfile
+textwrap
+tgext
+threading
+time
+traceback
+traitlets
+types
+typing
+urllib
+urlobject
+uuid
+warnings
+webhelpers2
+webob
+webtest
+whoosh
+win32traceutil
+zipfile
+'''.split())
+
+top_modules = set('''
+kallithea.alembic
+kallithea.bin
+kallithea.config
+kallithea.controllers
+kallithea.templates.py
+scripts
+'''.split())
+
+bottom_external_modules = set('''
+tg
+mercurial
+sqlalchemy
+alembic
+formencode
+pygments
+dulwich
+beaker
+psycopg2
+docs
+setup
+conftest
+'''.split())
+
+normal_modules = set('''
+kallithea
+kallithea.controllers.base
+kallithea.lib
+kallithea.lib.auth
+kallithea.lib.auth_modules
+kallithea.lib.celerylib
+kallithea.lib.db_manage
+kallithea.lib.helpers
+kallithea.lib.hooks
+kallithea.lib.indexers
+kallithea.lib.utils
+kallithea.lib.utils2
+kallithea.lib.vcs
+kallithea.lib.webutils
+kallithea.model
+kallithea.model.async_tasks
+kallithea.model.scm
+kallithea.templates.py
+'''.split())
+
+shown_modules = normal_modules | top_modules
+
+# break the chains somehow - this is a cleanup TODO list
+known_violations = set([
+('kallithea.lib.auth_modules', 'kallithea.lib.auth'),  # needs base&facade
+('kallithea.lib.utils', 'kallithea.model'),  # clean up utils
+('kallithea.lib.utils', 'kallithea.model.db'),
+('kallithea.lib.utils', 'kallithea.model.scm'),
+('kallithea.model', 'kallithea.lib.auth'),  # auth.HasXXX
+('kallithea.model', 'kallithea.lib.auth_modules'),  # validators
+('kallithea.model', 'kallithea.lib.hooks'),  # clean up hooks
+('kallithea.model', 'kallithea.model.scm'),
+('kallithea.model.scm', 'kallithea.lib.hooks'),
+])
+
+extra_edges = [
+('kallithea.config', 'kallithea.controllers'),  # through TG
+('kallithea.lib.auth', 'kallithea.lib.auth_modules'),  # custom loader
+]
+
+
+def normalize(s):
+    """Given a string with dot path, return the string it should be shown as."""
+    parts = s.replace('.__init__', '').split('.')
+    short_2 = '.'.join(parts[:2])
+    short_3 = '.'.join(parts[:3])
+    short_4 = '.'.join(parts[:4])
+    if parts[0] in ['scripts', 'contributor_data', 'i18n_utils']:
+        return 'scripts'
+    if short_3 == 'kallithea.model.meta':
+        return 'kallithea.model.db'
+    if parts[:4] == ['kallithea', 'lib', 'vcs', 'ssh']:
+        return 'kallithea.lib.vcs.ssh'
+    if short_4 in shown_modules:
+        return short_4
+    if short_3 in shown_modules:
+        return short_3
+    if short_2 in shown_modules:
+        return short_2
+    if short_2 == 'kallithea.tests':
+        return None
+    if parts[0] in ignored_modules:
+        return None
+    assert parts[0] in bottom_external_modules, parts
+    return parts[0]
+
+
+def main(filenames):
+    if not filenames or filenames[0].startswith('-'):
+        print('''\
+Usage:
+    hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/deps.py
+    dot -Tsvg deps.dot > deps.svg
+        ''')
+        raise SystemExit(1)
+
+    files_imports = dict()  # map each filename to its set of imports
+    import_deps = set()  # set of (module name, imported name) tuples
+    for fn in filenames:
+        with open(fn) as f:
+            s = f.read()
+
+        dot_name = (fn[:-3] if fn.endswith('.py') else fn).replace('/', '.')
+        file_imports = set()
+        for m in re.finditer(r'^ *(?:from ([^ ]*) import (?:([a-zA-Z].*)|\(([^)]*)\))|import (.*))$', s, re.MULTILINE):
+            m_from, m_from_import, m_from_import2, m_import = m.groups()
+            if m_from:
+                pre = m_from + '.'
+                if pre.startswith('.'):
+                    pre = dot_name.rsplit('.', 1)[0] + pre
+                importlist = m_from_import or m_from_import2
+            else:
+                pre = ''
+                importlist = m_import
+            for imp in importlist.split('#', 1)[0].split(','):
+                full_imp = pre + imp.strip().split(' as ', 1)[0]
+                file_imports.add(full_imp)
+                import_deps.add((dot_name, full_imp))
+        files_imports[fn] = file_imports
+
+    # dump out all deps for debugging and analysis
+    with open('deps.txt', 'w') as f:
+        for fn, file_imports in sorted(files_imports.items()):
+            for file_import in sorted(file_imports):
+                if file_import.split('.', 1)[0] in ignored_modules:
+                    continue
+                f.write('%s: %s\n' % (fn, file_import))
+
+    # find leaves that haven't been ignored - they are the important external dependencies and are shown in the bottom row
+    only_imported = set(
+        set(normalize(b) for a, b in import_deps) -
+        set(normalize(a) for a, b in import_deps) -
+        set([None, 'kallithea'])
+    )
+
+    normalized_dep_edges = set()
+    for dot_name, full_imp in import_deps:
+        a = normalize(dot_name)
+        b = normalize(full_imp)
+        if a is None or b is None or a == b:
+            continue
+        normalized_dep_edges.add((a, b))
+        #print((dot_name, full_imp, a, b))
+    normalized_dep_edges.update(extra_edges)
+
+    unseen_shown_modules = shown_modules.difference(a for a, b in normalized_dep_edges).difference(b for a, b in normalized_dep_edges)
+    assert not unseen_shown_modules, unseen_shown_modules
+
+    with open('deps.dot', 'w') as f:
+        f.write('digraph {\n')
+        f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(top_modules)))
+        f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(only_imported)))
+        for a, b in sorted(normalized_dep_edges):
+            f.write('  "%s" -> "%s"%s\n' % (a, b, ' [color=red]' if (a, b) in known_violations else ' [color=green]' if (a, b) in extra_edges else ''))
+        f.write('}\n')
+
+    # verify dependencies by untangling the dependency chain bottom-up:
+    todo = set(normalized_dep_edges)
+    unseen_violations = known_violations.difference(todo)
+    assert not unseen_violations, unseen_violations
+    for x in known_violations:
+        todo.remove(x)
+
+    while todo:
+        depending = set(a for a, b in todo)
+        depended = set(b for a, b in todo)
+        drop = depended - depending
+        if not drop:
+            print('ERROR: cycles:', len(todo))
+            for x in sorted(todo):
+                print('%s,' % (x,))
+            raise SystemExit(1)
+        #for do_b in sorted(drop):
+        #    print('Picking', do_b, '- unblocks:', ' '.join(a for a, b in sorted((todo)) if b == do_b))
+        todo = set((a, b) for a, b in todo if b in depending)
+        #print()
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
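
The closing while loop is the heart of the check: repeatedly peel off modules that nothing remaining depends on; if nothing can be peeled while edges are still left, those edges form a cycle. The same peel-off step in isolation, run on a toy edge set instead of the real import graph::

    def leftover_cycle_edges(edges):
        """Peel dependencies off bottom-up; whatever cannot be peeled is part of a cycle."""
        todo = set(edges)
        while todo:
            depending = set(a for a, b in todo)   # modules that still import something
            depended = set(b for a, b in todo)    # modules that are still imported
            if not depended - depending:          # no pure "bottom" module left: only cycles remain
                return todo
            todo = set((a, b) for a, b in todo if b in depending)
        return set()

    assert leftover_cycle_edges([('app', 'lib'), ('lib', 'db')]) == set()
    assert leftover_cycle_edges([('a', 'b'), ('b', 'a')]) == {('a', 'b'), ('b', 'a')}
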
--- a/scripts/docs-headings.py	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/docs-headings.py	Thu May 27 21:27:37 2021 +0200
@@ -31,7 +31,7 @@
 
 
 def main():
-    filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines()
+    filenames = subprocess.check_output(['hg', 'files', 'set:**.rst+kallithea/i18n/how_to']).splitlines()
     for fn in filenames:
         fn = fn.decode()
         print('processing %s' % fn)
--- a/scripts/generate-ini.py	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/generate-ini.py	Thu May 27 21:27:37 2021 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini
+Generate development.ini based on the ini template.
 """
 
 import re
--- a/scripts/i18n	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/i18n	Thu May 27 21:27:37 2021 +0200
@@ -19,7 +19,6 @@
 import sys
 
 import click
-
 import i18n_utils
 
 
@@ -90,11 +89,8 @@
 
     and then invoke merge/rebase/graft with the additional argument '--tool i18n'.
     """
-    from mercurial import (
-        context,
-        simplemerge,
-        ui as uimod,
-    )
+    from mercurial import context, simplemerge
+    from mercurial import ui as uimod
 
     print('i18n normalized-merge: normalizing and merging %s' % output)
 
--- a/scripts/i18n_utils.py	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/i18n_utils.py	Thu May 27 21:27:37 2021 +0200
@@ -11,8 +11,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from __future__ import print_function
-
 import os
 import re
 import shutil
--- a/scripts/logformat.py	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/logformat.py	Thu May 27 21:27:37 2021 +0200
@@ -40,7 +40,7 @@
     if len(sys.argv) < 2:
         print('Cleanup of superfluous % formatting of log statements.')
         print('Usage:')
-        print('''  hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''')
+        print('''  hg revert `hg files 'set:**.py'|grep -v logformat.py` && scripts/logformat.py `hg files 'set:**.py'` && hg diff''')
         raise SystemExit(1)
 
     for f in sys.argv[1:]:
--- a/scripts/make-release	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/make-release	Thu May 27 21:27:37 2021 +0200
@@ -75,7 +75,7 @@
 xdg-open https://readthedocs.org/projects/kallithea/
 curl -X POST http://readthedocs.org/build/kallithea
 xdg-open https://readthedocs.org/projects/kallithea/builds
-xdg-open http://docs.kallithea-scm.org/en/latest/ # or whatever the branch is
+xdg-open https://docs.kallithea-scm.org/en/latest/ # or whatever the branch is
 
 twine upload dist/*
 xdg-open https://pypi.python.org/pypi/Kallithea
--- a/scripts/run-all-cleanup	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/run-all-cleanup	Thu May 27 21:27:37 2021 +0200
@@ -5,9 +5,13 @@
 set -e
 set -x
 
+hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/deps.py
+dot -Tsvg deps.dot > deps.svg
+
 scripts/docs-headings.py
 scripts/generate-ini.py
 scripts/whitespacecleanup.sh
+hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/source_format.py
 
-hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs scripts/pyflakes
+hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/pyflakes
 echo "no blocking problems found by $0"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/source_format.py	Thu May 27 21:27:37 2021 +0200
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+
+# hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/source_format.py
+
+import re
+import sys
+
+
+filenames = sys.argv[1:]
+
+for fn in filenames:
+    with open(fn) as f:
+        org_content = f.read()
+
+    mod_name = fn[:-3] if fn.endswith('.py') else fn
+    mod_name = mod_name[:-9] if mod_name.endswith('/__init__') else mod_name
+    mod_name = mod_name.replace('/', '.')
+    def f(m):
+        return '"""\n%s\n%s\n' % (mod_name, '~' * len(mod_name))
+    new_content = re.sub(r'^"""\n(kallithea\..*\n)(~+\n)?', f, org_content, count=1, flags=re.MULTILINE)
+
+    if new_content != org_content:
+        with open(fn, 'w') as f:
+            f.write(new_content)
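
The substitution above rewrites the first module docstring so its title line is the file's dotted module path with a matching '~' underline. A small self-contained illustration of one such rewrite (the module name and file content are invented for the example)::

    import re

    mod_name = 'kallithea.lib.example'   # hypothetical dotted path derived from a filename
    org_content = '"""\nkallithea.lib.old_name\n~~~~~~~~\nSome module docstring body.\n"""\n'

    def f(m):
        return '"""\n%s\n%s\n' % (mod_name, '~' * len(mod_name))

    new_content = re.sub(r'^"""\n(kallithea\..*\n)(~+\n)?', f, org_content, count=1, flags=re.MULTILINE)
    assert new_content == '"""\nkallithea.lib.example\n%s\nSome module docstring body.\n"""\n' % ('~' * len(mod_name))
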
--- a/scripts/whitespacecleanup.sh	Sun May 09 08:42:17 2021 +0200
+++ b/scripts/whitespacecleanup.sh	Thu May 27 21:27:37 2021 +0200
@@ -2,7 +2,7 @@
 
 # Enforce some consistency in whitespace - just to avoid spurious whitespace changes
 
-files=`hg mani | egrep -v '/fontello/|/email_templates/|(^LICENSE-MERGELY.html|^docs/Makefile|^scripts/whitespacecleanup.sh|/(graph|mergely|native.history)\.js|/test_dump_html_mails.ref.html|\.png|\.gif|\.ico|\.pot|\.po|\.mo|\.tar\.gz|\.diff)$'`
+files=`hg files | egrep -v '/fontello/|/templates/email/|(^LICENSE-MERGELY.html|^docs/Makefile|^scripts/whitespacecleanup.sh|/(graph|mergely|native.history)\.js|/test_dump_html_mails.ref.html|\.png|\.gif|\.ico|\.pot|\.po|\.mo|\.tar\.gz|\.diff)$'`
 
 sed -i "s/`printf '\r'`//g" $files
 sed -i -e "s,`printf '\t'`,    ,g" $files
@@ -11,14 +11,14 @@
 # ensure one trailing newline - remove empty last line and make last line include trailing newline:
 sed -i -e '$,${/^$/d}' -e '$a\' $files
 
-sed -i -e 's,\([^ /]\){,\1 {,g' `hg loc '*.css'`
-sed -i -e 's|^\([^ /].*,\)\([^ ]\)|\1 \2|g' `hg loc '*.css'`
+sed -i -e 's,\([^ /]\){,\1 {,g' `hg files 'set:**.css'`
+sed -i -e 's|^\([^ /].*,\)\([^ ]\)|\1 \2|g' `hg files 'set:**.css'`
 
-hg mani | xargs chmod -x
-hg loc 'set:!binary()&grep("^#!")&!(**_tmpl.py)&!(**/template**)' | xargs chmod +x
+hg files | xargs chmod -x
+hg files 'set:!binary()&grep("^#!")&!(**_tmpl.py)&!(**/template**)' | xargs chmod +x
 
 # isort is installed from dev_requirements.txt
-hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs isort --line-width 160 --lines-after-imports 2
+hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs isort --line-width 160 --lines-after-imports 2
 
 echo "diff after $0:"
 hg diff
--- a/setup.cfg	Sun May 09 08:42:17 2021 +0200
+++ b/setup.cfg	Thu May 27 21:27:37 2021 +0200
@@ -35,3 +35,14 @@
 
 [upload_sphinx]
 upload-dir = docs/_build/html
+
+[pytype]
+inputs =
+    kallithea
+    setup.py
+exclude =
+    **/test_*.py
+disable =
+    pyi-error
+keep_going = True
+#jobs = 3
--- a/setup.py	Sun May 09 08:42:17 2021 +0200
+++ b/setup.py	Thu May 27 21:27:37 2021 +0200
@@ -2,6 +2,7 @@
 # -*- coding: utf-8 -*-
 import os
 import platform
+import re
 import sys
 
 import setuptools
@@ -17,7 +18,6 @@
 
 
 def _get_meta_var(name, data, callback_handler=None):
-    import re
     matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
     if matches:
         s = eval(matches.groups()[0])
@@ -53,9 +53,9 @@
     "FormEncode >= 1.3.1, < 1.4",
     "SQLAlchemy >= 1.2.9, < 1.4",
     "Mako >= 0.9.1, < 1.2",
-    "Pygments >= 2.2.0, < 2.6",
+    "Pygments >= 2.2.0, < 2.7",
     "Whoosh >= 2.7.1, < 2.8",
-    "celery >= 4.3, < 4.5, != 4.4.4", # 4.4.4 is broken due to unexpressed dependency on 'future', see https://github.com/celery/celery/pull/6146
+    "celery >= 5, < 5.1",
     "Babel >= 1.3, < 2.9",
     "python-dateutil >= 2.1.0, < 2.9",
     "Markdown >= 2.2.1, < 3.2",
@@ -63,9 +63,9 @@
     "URLObject >= 2.3.4, < 2.5",
     "Routes >= 2.0, < 2.5",
     "dulwich >= 0.19.0, < 0.20",
-    "mercurial >= 5.2, < 5.5",
+    "mercurial >= 5.2, < 5.9",
     "decorator >= 4.2.1, < 4.5",
-    "Paste >= 2.0.3, < 3.4",
+    "Paste >= 2.0.3, < 3.5",
     "bleach >= 3.0, < 3.1.4",
     "Click >= 7.0, < 8",
     "ipaddr >= 2.2.0, < 2.3",
@@ -73,6 +73,7 @@
     "paginate_sqlalchemy >= 0.3.0, < 0.4",
     "bcrypt >= 3.1.0, < 3.2",
     "pip >= 20.0, < 999",
+    "chardet >= 3",
 ]
 
 dependency_links = [
@@ -156,6 +157,6 @@
     kallithea-cli =    kallithea.bin.kallithea_cli:cli
 
     [paste.app_factory]
-    main = kallithea.config.middleware:make_app
+    main = kallithea.config.application:make_app
     """,
 )